Searched refs:mo (Results 1 - 25 of 57) sorted by relevance

/freebsd-11-stable/contrib/llvm-project/compiler-rt/include/sanitizer/
tsan_interface_atomic.h
44 __tsan_memory_order mo);
46 __tsan_memory_order mo);
48 __tsan_memory_order mo);
50 __tsan_memory_order mo);
53 __tsan_memory_order mo);
57 __tsan_memory_order mo);
59 __tsan_memory_order mo);
61 __tsan_memory_order mo);
63 __tsan_memory_order mo);
66 __tsan_memory_order mo);
[all...]
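
The header above declares the public TSan atomic interface; every operation takes an explicit __tsan_memory_order argument. Below is a minimal sketch of how a release/acquire pair maps onto that interface. The types, entry points, and order constants come from this header; the publish/consume functions are hypothetical, these calls are normally emitted by compiler instrumentation, and the program must be linked against the TSan runtime.

    #include <sanitizer/tsan_interface_atomic.h>

    static __tsan_atomic32 g_flag;   /* 32-bit atomic cell, typedef from the header */

    /* Store 1 with release ordering, mirroring atomic_store_explicit(..., release). */
    void publish(void) {
      __tsan_atomic32_store(&g_flag, 1, __tsan_memory_order_release);
    }

    /* Load with acquire ordering, mirroring atomic_load_explicit(..., acquire). */
    int consume(void) {
      return __tsan_atomic32_load(&g_flag, __tsan_memory_order_acquire);
    }
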
/freebsd-11-stable/contrib/llvm-project/compiler-rt/lib/tsan/rtl/
tsan_interface_atomic.cpp
35 static bool IsLoadOrder(morder mo) { argument
36 return mo == mo_relaxed || mo == mo_consume
37 || mo == mo_acquire || mo == mo_seq_cst;
40 static bool IsStoreOrder(morder mo) { argument
41 return mo == mo_relaxed || mo == mo_release || mo == mo_seq_cst;
44 static bool IsReleaseOrder(morder mo) { argument
48 IsAcquireOrder(morder mo) argument
53 IsAcqRelOrder(morder mo) argument
196 to_mo(morder mo) argument
210 NoTsanAtomicLoad(const volatile T *a, morder mo) argument
215 NoTsanAtomicLoad(const volatile a128 *a, morder mo) argument
222 AtomicLoad(ThreadState *thr, uptr pc, const volatile T *a, morder mo) argument
246 NoTsanAtomicStore(volatile T *a, T v, morder mo) argument
251 NoTsanAtomicStore(volatile a128 *a, a128 v, morder mo) argument
258 AtomicStore(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) argument
281 AtomicRMW(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) argument
303 NoTsanAtomicExchange(volatile T *a, T v, morder mo) argument
308 NoTsanAtomicFetchAdd(volatile T *a, T v, morder mo) argument
313 NoTsanAtomicFetchSub(volatile T *a, T v, morder mo) argument
318 NoTsanAtomicFetchAnd(volatile T *a, T v, morder mo) argument
323 NoTsanAtomicFetchOr(volatile T *a, T v, morder mo) argument
328 NoTsanAtomicFetchXor(volatile T *a, T v, morder mo) argument
333 NoTsanAtomicFetchNand(volatile T *a, T v, morder mo) argument
338 AtomicExchange(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) argument
344 AtomicFetchAdd(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) argument
350 AtomicFetchSub(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) argument
356 AtomicFetchAnd(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) argument
362 AtomicFetchOr(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) argument
368 AtomicFetchXor(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) argument
374 AtomicFetchNand(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) argument
380 NoTsanAtomicCAS(volatile T *a, T *c, T v, morder mo, morder fmo) argument
385 NoTsanAtomicCAS(volatile a128 *a, a128 *c, a128 v, morder mo, morder fmo) argument
397 NoTsanAtomicCAS(volatile T *a, T c, T v, morder mo, morder fmo) argument
403 AtomicCAS(ThreadState *thr, uptr pc, volatile T *a, T *c, T v, morder mo, morder fmo) argument
436 AtomicCAS(ThreadState *thr, uptr pc, volatile T *a, T c, T v, morder mo, morder fmo) argument
443 NoTsanAtomicFence(morder mo) argument
447 AtomicFence(ThreadState *thr, uptr pc, morder mo) argument
458 convert_morder(morder mo) argument
491 ScopedAtomic(ThreadState *thr, uptr pc, const volatile void *a, morder mo, const char *func) argument
505 AtomicStatInc(ThreadState *thr, uptr size, morder mo, StatType t) argument
523 __tsan_atomic8_load(const volatile a8 *a, morder mo) argument
528 __tsan_atomic16_load(const volatile a16 *a, morder mo) argument
533 __tsan_atomic32_load(const volatile a32 *a, morder mo) argument
538 __tsan_atomic64_load(const volatile a64 *a, morder mo) argument
544 __tsan_atomic128_load(const volatile a128 *a, morder mo) argument
550 __tsan_atomic8_store(volatile a8 *a, a8 v, morder mo) argument
555 __tsan_atomic16_store(volatile a16 *a, a16 v, morder mo) argument
560 __tsan_atomic32_store(volatile a32 *a, a32 v, morder mo) argument
565 __tsan_atomic64_store(volatile a64 *a, a64 v, morder mo) argument
571 __tsan_atomic128_store(volatile a128 *a, a128 v, morder mo) argument
577 __tsan_atomic8_exchange(volatile a8 *a, a8 v, morder mo) argument
582 __tsan_atomic16_exchange(volatile a16 *a, a16 v, morder mo) argument
587 __tsan_atomic32_exchange(volatile a32 *a, a32 v, morder mo) argument
592 __tsan_atomic64_exchange(volatile a64 *a, a64 v, morder mo) argument
598 __tsan_atomic128_exchange(volatile a128 *a, a128 v, morder mo) argument
604 __tsan_atomic8_fetch_add(volatile a8 *a, a8 v, morder mo) argument
609 __tsan_atomic16_fetch_add(volatile a16 *a, a16 v, morder mo) argument
614 __tsan_atomic32_fetch_add(volatile a32 *a, a32 v, morder mo) argument
619 __tsan_atomic64_fetch_add(volatile a64 *a, a64 v, morder mo) argument
625 __tsan_atomic128_fetch_add(volatile a128 *a, a128 v, morder mo) argument
631 __tsan_atomic8_fetch_sub(volatile a8 *a, a8 v, morder mo) argument
636 __tsan_atomic16_fetch_sub(volatile a16 *a, a16 v, morder mo) argument
641 __tsan_atomic32_fetch_sub(volatile a32 *a, a32 v, morder mo) argument
646 __tsan_atomic64_fetch_sub(volatile a64 *a, a64 v, morder mo) argument
652 __tsan_atomic128_fetch_sub(volatile a128 *a, a128 v, morder mo) argument
658 __tsan_atomic8_fetch_and(volatile a8 *a, a8 v, morder mo) argument
663 __tsan_atomic16_fetch_and(volatile a16 *a, a16 v, morder mo) argument
668 __tsan_atomic32_fetch_and(volatile a32 *a, a32 v, morder mo) argument
673 __tsan_atomic64_fetch_and(volatile a64 *a, a64 v, morder mo) argument
679 __tsan_atomic128_fetch_and(volatile a128 *a, a128 v, morder mo) argument
685 __tsan_atomic8_fetch_or(volatile a8 *a, a8 v, morder mo) argument
690 __tsan_atomic16_fetch_or(volatile a16 *a, a16 v, morder mo) argument
695 __tsan_atomic32_fetch_or(volatile a32 *a, a32 v, morder mo) argument
700 __tsan_atomic64_fetch_or(volatile a64 *a, a64 v, morder mo) argument
706 __tsan_atomic128_fetch_or(volatile a128 *a, a128 v, morder mo) argument
712 __tsan_atomic8_fetch_xor(volatile a8 *a, a8 v, morder mo) argument
717 __tsan_atomic16_fetch_xor(volatile a16 *a, a16 v, morder mo) argument
722 __tsan_atomic32_fetch_xor(volatile a32 *a, a32 v, morder mo) argument
727 __tsan_atomic64_fetch_xor(volatile a64 *a, a64 v, morder mo) argument
733 __tsan_atomic128_fetch_xor(volatile a128 *a, a128 v, morder mo) argument
739 __tsan_atomic8_fetch_nand(volatile a8 *a, a8 v, morder mo) argument
744 __tsan_atomic16_fetch_nand(volatile a16 *a, a16 v, morder mo) argument
749 __tsan_atomic32_fetch_nand(volatile a32 *a, a32 v, morder mo) argument
754 __tsan_atomic64_fetch_nand(volatile a64 *a, a64 v, morder mo) argument
760 __tsan_atomic128_fetch_nand(volatile a128 *a, a128 v, morder mo) argument
766 __tsan_atomic8_compare_exchange_strong(volatile a8 *a, a8 *c, a8 v, morder mo, morder fmo) argument
772 __tsan_atomic16_compare_exchange_strong(volatile a16 *a, a16 *c, a16 v, morder mo, morder fmo) argument
778 __tsan_atomic32_compare_exchange_strong(volatile a32 *a, a32 *c, a32 v, morder mo, morder fmo) argument
784 __tsan_atomic64_compare_exchange_strong(volatile a64 *a, a64 *c, a64 v, morder mo, morder fmo) argument
791 __tsan_atomic128_compare_exchange_strong(volatile a128 *a, a128 *c, a128 v, morder mo, morder fmo) argument
798 __tsan_atomic8_compare_exchange_weak(volatile a8 *a, a8 *c, a8 v, morder mo, morder fmo) argument
804 __tsan_atomic16_compare_exchange_weak(volatile a16 *a, a16 *c, a16 v, morder mo, morder fmo) argument
810 __tsan_atomic32_compare_exchange_weak(volatile a32 *a, a32 *c, a32 v, morder mo, morder fmo) argument
816 __tsan_atomic64_compare_exchange_weak(volatile a64 *a, a64 *c, a64 v, morder mo, morder fmo) argument
823 __tsan_atomic128_compare_exchange_weak(volatile a128 *a, a128 *c, a128 v, morder mo, morder fmo) argument
830 __tsan_atomic8_compare_exchange_val(volatile a8 *a, a8 c, a8 v, morder mo, morder fmo) argument
836 __tsan_atomic16_compare_exchange_val(volatile a16 *a, a16 c, a16 v, morder mo, morder fmo) argument
842 __tsan_atomic32_compare_exchange_val(volatile a32 *a, a32 c, a32 v, morder mo, morder fmo) argument
848 __tsan_atomic64_compare_exchange_val(volatile a64 *a, a64 c, a64 v, morder mo, morder fmo) argument
855 __tsan_atomic128_compare_exchange_val(volatile a128 *a, a128 c, a128 v, morder mo, morder fmo) argument
862 __tsan_atomic_thread_fence(morder mo) argument
868 __tsan_atomic_signal_fence(morder mo) argument
[all...]
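
Before touching memory, the runtime classifies the requested order with small predicates; the two whose bodies are visible in the hits above (lines 35-42) can be restated standalone as below. The morder enum here is a local stand-in for the runtime's type; the predicate bodies follow the matched lines.

    enum morder {
      mo_relaxed, mo_consume, mo_acquire, mo_release, mo_acq_rel, mo_seq_cst
    };

    /* Orders accepted for an atomic load (lines 36-37 above). */
    static bool IsLoadOrder(morder mo) {
      return mo == mo_relaxed || mo == mo_consume
          || mo == mo_acquire || mo == mo_seq_cst;
    }

    /* Orders accepted for an atomic store (line 41 above). */
    static bool IsStoreOrder(morder mo) {
      return mo == mo_relaxed || mo == mo_release || mo == mo_seq_cst;
    }
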
tsan_interface.h
221 a8 __tsan_atomic8_load(const volatile a8 *a, morder mo);
223 a16 __tsan_atomic16_load(const volatile a16 *a, morder mo);
225 a32 __tsan_atomic32_load(const volatile a32 *a, morder mo);
227 a64 __tsan_atomic64_load(const volatile a64 *a, morder mo);
230 a128 __tsan_atomic128_load(const volatile a128 *a, morder mo);
234 void __tsan_atomic8_store(volatile a8 *a, a8 v, morder mo);
236 void __tsan_atomic16_store(volatile a16 *a, a16 v, morder mo);
238 void __tsan_atomic32_store(volatile a32 *a, a32 v, morder mo);
240 void __tsan_atomic64_store(volatile a64 *a, a64 v, morder mo);
243 void __tsan_atomic128_store(volatile a128 *a, a128 v, morder mo);
[all...]
tsan_interceptors_mac.cpp
53 #define OSATOMIC_INTERCEPTOR(return_t, t, tsan_t, f, tsan_atomic_f, mo) \
56 return tsan_atomic_f((volatile tsan_t *)ptr, x, mo); \
59 #define OSATOMIC_INTERCEPTOR_PLUS_X(return_t, t, tsan_t, f, tsan_atomic_f, mo) \
62 return tsan_atomic_f((volatile tsan_t *)ptr, x, mo) + x; \
65 #define OSATOMIC_INTERCEPTOR_PLUS_1(return_t, t, tsan_t, f, tsan_atomic_f, mo) \
68 return tsan_atomic_f((volatile tsan_t *)ptr, 1, mo) + 1; \
72 mo) \
75 return tsan_atomic_f((volatile tsan_t *)ptr, 1, mo) - 1; \
137 #define OSATOMIC_INTERCEPTOR_BITOP(f, op, clear, mo) \
143 char orig_byte = op((volatile a8 *)byte_ptr, mask, mo); \
[all...]
/freebsd-11-stable/contrib/llvm-project/compiler-rt/lib/sanitizer_common/
sanitizer_atomic_clang.h
47 typename T::Type v, memory_order mo) {
48 (void)mo;
55 typename T::Type v, memory_order mo) {
56 (void)mo;
63 typename T::Type v, memory_order mo) {
65 if (mo & (memory_order_release | memory_order_acq_rel | memory_order_seq_cst))
68 if (mo == memory_order_seq_cst)
76 memory_order mo) {
90 memory_order mo) {
91 return atomic_compare_exchange_strong(a, cmp, xchg, mo);
46 atomic_fetch_add(volatile T *a, typename T::Type v, memory_order mo) argument
54 atomic_fetch_sub(volatile T *a, typename T::Type v, memory_order mo) argument
62 atomic_exchange(volatile T *a, typename T::Type v, memory_order mo) argument
74 atomic_compare_exchange_strong(volatile T *a, typename T::Type *cmp, typename T::Type xchg, memory_order mo) argument
87 atomic_compare_exchange_weak(volatile T *a, typename T::Type *cmp, typename T::Type xchg, memory_order mo) argument
[all...]
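
These are sanitizer_common's internal atomic wrappers; callers pass a memory_order that the clang implementation largely forwards to compiler builtins (some paths simply discard it, as the (void)mo lines show). Below is a hypothetical use of the matched fetch_add/fetch_sub entry points for a reference count, assuming the umbrella sanitizer_atomic.h header from the same directory; the refcount variable and ref_* functions are illustrative only.

    #include "sanitizer_atomic.h"

    using namespace __sanitizer;

    static atomic_uint32_t refcount;

    void ref_acquire() {
      /* fetch_add returns the previous value; relaxed suffices for taking a ref. */
      atomic_fetch_add(&refcount, 1, memory_order_relaxed);
    }

    bool ref_release() {
      /* The thread that drops the count from 1 to 0 owns the teardown. */
      return atomic_fetch_sub(&refcount, 1, memory_order_acq_rel) == 1;
    }
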
sanitizer_atomic_clang_x86.h
28 const volatile T *a, memory_order mo) {
29 DCHECK(mo & (memory_order_relaxed | memory_order_consume
36 if (mo == memory_order_relaxed) {
38 } else if (mo == memory_order_consume) {
44 } else if (mo == memory_order_acquire) {
73 INLINE void atomic_store(volatile T *a, typename T::Type v, memory_order mo) { argument
74 DCHECK(mo & (memory_order_relaxed | memory_order_release
80 if (mo == memory_order_relaxed) {
82 } else if (mo == memory_order_release) {
106 if (mo
27 atomic_load( const volatile T *a, memory_order mo) argument
[all...]
sanitizer_atomic_msvc.h
72 const volatile T *a, memory_order mo) {
73 DCHECK(mo & (memory_order_relaxed | memory_order_consume
78 if (mo == memory_order_relaxed) {
89 INLINE void atomic_store(volatile T *a, typename T::Type v, memory_order mo) { argument
90 DCHECK(mo & (memory_order_relaxed | memory_order_release
94 if (mo == memory_order_relaxed) {
101 if (mo == memory_order_seq_cst)
106 u32 v, memory_order mo) {
107 (void)mo;
114 uptr v, memory_order mo) {
71 atomic_load( const volatile T *a, memory_order mo) argument
105 atomic_fetch_add(volatile atomic_uint32_t *a, u32 v, memory_order mo) argument
113 atomic_fetch_add(volatile atomic_uintptr_t *a, uptr v, memory_order mo) argument
126 atomic_fetch_sub(volatile atomic_uint32_t *a, u32 v, memory_order mo) argument
134 atomic_fetch_sub(volatile atomic_uintptr_t *a, uptr v, memory_order mo) argument
147 atomic_exchange(volatile atomic_uint8_t *a, u8 v, memory_order mo) argument
154 atomic_exchange(volatile atomic_uint16_t *a, u16 v, memory_order mo) argument
161 atomic_exchange(volatile atomic_uint32_t *a, u32 v, memory_order mo) argument
168 atomic_compare_exchange_strong(volatile atomic_uint8_t *a, u8 *cmp, u8 xchgv, memory_order mo) argument
194 atomic_compare_exchange_strong(volatile atomic_uintptr_t *a, uptr *cmp, uptr xchg, memory_order mo) argument
207 atomic_compare_exchange_strong(volatile atomic_uint16_t *a, u16 *cmp, u16 xchg, memory_order mo) argument
220 atomic_compare_exchange_strong(volatile atomic_uint32_t *a, u32 *cmp, u32 xchg, memory_order mo) argument
233 atomic_compare_exchange_strong(volatile atomic_uint64_t *a, u64 *cmp, u64 xchg, memory_order mo) argument
247 atomic_compare_exchange_weak(volatile T *a, typename T::Type *cmp, typename T::Type xchg, memory_order mo) argument
[all...]
sanitizer_atomic_clang_other.h
26 const volatile T *a, memory_order mo) {
27 DCHECK(mo & (memory_order_relaxed | memory_order_consume
34 if (mo == memory_order_relaxed) {
36 } else if (mo == memory_order_consume) {
42 } else if (mo == memory_order_acquire) {
63 INLINE void atomic_store(volatile T *a, typename T::Type v, memory_order mo) { argument
64 DCHECK(mo & (memory_order_relaxed | memory_order_release
70 if (mo == memory_order_relaxed) {
72 } else if (mo == memory_order_release) {
25 atomic_load( const volatile T *a, memory_order mo) argument
sanitizer_atomic_clang_mips.h
42 memory_order mo) {
43 DCHECK(mo &
60 memory_order mo) {
61 return atomic_fetch_add(ptr, -val, mo);
68 memory_order mo) {
69 DCHECK(mo &
91 memory_order mo) {
92 DCHECK(mo &
99 return atomic_fetch_add(Newptr, zero, mo);
104 memory_order mo) {
40 atomic_fetch_add(volatile atomic_uint64_t *ptr, atomic_uint64_t::Type val, memory_order mo) argument
58 atomic_fetch_sub(volatile atomic_uint64_t *ptr, atomic_uint64_t::Type val, memory_order mo) argument
65 atomic_compare_exchange_strong(volatile atomic_uint64_t *ptr, atomic_uint64_t::Type *cmp, atomic_uint64_t::Type xchg, memory_order mo) argument
90 atomic_load(const volatile atomic_uint64_t *ptr, memory_order mo) argument
103 atomic_store(volatile atomic_uint64_t *ptr, atomic_uint64_t::Type v, memory_order mo) argument
[all...]
/freebsd-11-stable/contrib/amd/amd/
restart.c
51 am_opts mo; local
57 memset(&mo, 0, sizeof(mo));
58 mo.opt_rhost = NULL;
59 mo.opt_rfs = NULL;
63 mo.opt_rhost = xstrdup(me->mnt_fsname);
64 mo.opt_rfs = xstrdup(cp + 1);
71 mo.opt_rhost = xstrdup(me->mnt_fsname);
72 mo.opt_rfs = xstrdup("/");
73 me->mnt_fsname = str3cat(me->mnt_fsname, mo
[all...]
mntfs.c
76 init_mntfs(mntfs *mf, am_ops *ops, am_opts *mo, char *mp, char *info, char *auto_opts, char *mopts, char *remopts) argument
81 if (mo)
82 mf->mf_fo = copy_opts(mo);
105 alloc_mntfs(am_ops *ops, am_opts *mo, char *mp, char *info, char *auto_opts, char *mopts, char *remopts) argument
109 init_mntfs(mf, ops, mo, mp, info, auto_opts, mopts, remopts);
119 locate_mntfs(am_ops *ops, am_opts *mo, char *mp, char *info, char *auto_opts, char *mopts, char *remopts) argument
201 find_mntfs(am_ops *ops, am_opts *mo, char *mp, char *info, char *auto_opts, char *mopts, char *remopts) argument
203 mntfs *mf = locate_mntfs(ops, mo, mp, info, auto_opts, mopts, remopts);
207 return alloc_mntfs(ops, mo, mp, info, auto_opts, mopts, remopts);
383 realloc_mntfs(mntfs *mf, am_ops *ops, am_opts *mo, cha argument
[all...]
/freebsd-11-stable/sbin/mount_fusefs/
mount_fusefs.c
124 struct mntopt *mo; local
196 for (mo = mopts; mo->m_flag; ++mo) {
199 if (mo->m_flag != mv->mv_flag)
201 p = strstr(optarg, mo->m_option);
203 p += strlen(mo->m_option);
274 for (mo = mopts; mo->m_flag; ++mo) {
445 struct mntopt *mo; local
[all...]
/freebsd-11-stable/usr.sbin/ppp/
deflate.c
77 struct mbuf *mo_head, *mo, *mi_head, *mi; local
97 mo_head = mo = m_get(DEFLATE_CHUNK_LEN, MB_CCPOUT);
98 mo->m_len = 2;
99 wp = MBUF_CTOP(mo);
137 mo->m_next = m_get(DEFLATE_CHUNK_LEN, MB_CCPOUT);
138 olen += (mo->m_len = DEFLATE_CHUNK_LEN);
139 mo = mo->m_next;
140 mo->m_len = 0;
141 state->cx.next_out = MBUF_CTOP(mo);
204 struct mbuf *mo, *mo_head, *mi_head; local
[all...]
/freebsd-11-stable/sys/i386/i386/
mem.c
188 struct mem_range_op *mo = (struct mem_range_op *)data; local
206 nd = imin(mo->mo_arg[0], mem_range_softc.mr_ndesc);
213 error = copyout(md, mo->mo_desc,
219 mo->mo_arg[0] = nd;
225 error = copyin(mo->mo_desc, md, sizeof(struct mem_range_desc));
229 error = mem_range_attr_set(md, &mo->mo_arg[0]);
/freebsd-11-stable/sys/amd64/amd64/
mem.c
192 struct mem_range_op *mo = (struct mem_range_op *)data; local
210 nd = imin(mo->mo_arg[0], mem_range_softc.mr_ndesc);
217 error = copyout(md, mo->mo_desc,
223 mo->mo_arg[0] = nd;
229 error = copyin(mo->mo_desc, md, sizeof(struct mem_range_desc));
233 error = mem_range_attr_set(md, &mo->mo_arg[0]);
/freebsd-11-stable/contrib/llvm-project/llvm/lib/CodeGen/
ExecutionDomainFix.cpp
262 MachineOperand &mo = mi->getOperand(i);
263 if (!mo.isReg())
265 for (int rx : regIndices(mo.getReg())) {
272 MachineOperand &mo = mi->getOperand(i);
273 if (!mo.isReg())
275 for (int rx : regIndices(mo.getReg())) {
293 MachineOperand &mo = mi->getOperand(i);
294 if (!mo.isReg())
296 for (int rx : regIndices(mo.getReg())) {
383 for (MachineOperand &mo
[all...]
/freebsd-11-stable/sys/sys/
pmckern.h
111 #define PMC_SOFT_CALL(pr, mo, fu, na) \
113 if (__predict_false(pmc_##pr##_##mo##_##fu##_##na.ps_running)) { \
118 ks.pm_ev = pmc_##pr##_##mo##_##fu##_##na.ps_ev.pm_ev_code; \
127 #define PMC_SOFT_CALL(pr, mo, fu, na) \
136 #define PMC_SOFT_CALL_TF(pr, mo, fu, na, tf) \
138 if (__predict_false(pmc_##pr##_##mo##_##fu##_##na.ps_running)) { \
142 ks.pm_ev = pmc_##pr##_##mo##_##fu##_##na.ps_ev.pm_ev_code; \
/freebsd-11-stable/sys/powerpc/powerpc/
mem.c
269 struct mem_range_op *mo = (struct mem_range_op *)data; local
287 nd = imin(mo->mo_arg[0], mem_range_softc.mr_ndesc);
294 error = copyout(md, mo->mo_desc,
300 mo->mo_arg[0] = nd;
306 error = copyin(mo->mo_desc, md, sizeof(struct mem_range_desc));
310 error = mem_range_attr_set(md, &mo->mo_arg[0]);
/freebsd-11-stable/sys/dev/cxgb/common/
cxgb_vsc7323.c
56 const struct mdio_ops *mo = adapter_info(adap)->mdio_ops; local
59 ret = mo->write(adap, ELMR_MDIO_ADDR, 0, ELMR_ADDR, start);
61 ret = mo->write(adap, ELMR_MDIO_ADDR, 0, ELMR_DATA_LO,
64 ret = mo->write(adap, ELMR_MDIO_ADDR, 0, ELMR_DATA_HI,
80 const struct mdio_ops *mo = adapter_info(adap)->mdio_ops; local
84 ret = mo->write(adap, ELMR_MDIO_ADDR, 0, ELMR_ADDR, start);
89 ret = mo->read(adap, ELMR_MDIO_ADDR, 0, ELMR_STAT, &v);
102 ret = mo->read(adap, ELMR_MDIO_ADDR, 0, ELMR_DATA_LO, vals);
104 ret = mo->read(adap, ELMR_MDIO_ADDR, 0, ELMR_DATA_HI,
/freebsd-11-stable/contrib/libxo/tests/gettext/
Makefile.am
82 test tests: ${bin_PROGRAMS} build-mo-files
211 mo build-mo-files:
216 (${DB} ${MSGFMT} -o po/$$lang/LC_MESSAGES/$$base.mo \
219 (${DB} ${MSGFMT} -o po/$$lang/LC_MESSAGES/$$base.mo \
/freebsd-11-stable/contrib/gcc/
var-tracking.c
1643 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++; local
1648 mo->type = MO_USE;
1650 mo->type = MO_USE_NO_VAR;
1651 mo->u.loc = *loc;
1652 mo->insn = (rtx) insn;
1660 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++; local
1662 mo->type = MO_USE;
1663 mo->u.loc = *loc;
1664 mo->insn = (rtx) insn;
1688 micro_operation *mo local
1712 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++; local
2861 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++; local
2892 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++; local
2925 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++; local
[all...]
/freebsd-11-stable/crypto/heimdal/lib/gssapi/mech/
gss_mo.c
42 get_option_def(int def, gss_const_OID mech, gss_mo_desc *mo, gss_buffer_t value) argument
48 _gss_mo_get_option_1(gss_const_OID mech, gss_mo_desc *mo, gss_buffer_t value) argument
50 return get_option_def(1, mech, mo, value);
54 _gss_mo_get_option_0(gss_const_OID mech, gss_mo_desc *mo, gss_buffer_t value) argument
56 return get_option_def(0, mech, mo, value);
60 _gss_mo_get_ctx_as_string(gss_const_OID mech, gss_mo_desc *mo, gss_buffer_t value) argument
63 value->value = strdup((char *)mo->ctx);
66 value->length = strlen((char *)mo->ctx);
/freebsd-11-stable/sys/kgssapi/krb5/
krb5_mech.c
756 krb5_insert_seq(struct krb5_msg_order *mo, uint32_t seq, int index) argument
760 if (mo->km_length < mo->km_jitter_window)
761 mo->km_length++;
763 for (i = mo->km_length - 1; i > index; i--)
764 mo->km_elem[i] = mo->km_elem[i - 1];
765 mo->km_elem[index] = seq;
775 struct krb5_msg_order *mo = &kc->kc_msg_order; local
776 int check_sequence = mo
[all...]
/freebsd-11-stable/sys/netipsec/
ipsec_mbuf.c
261 struct mbuf *mo; local
281 mo = m1->m_next;
291 m1->m_next = mo;
/freebsd-11-stable/contrib/dialog/po/
makefile.inn
60 .SUFFIXES: .c .o .po .pox .gmo .mo .msg .cat
69 .po.mo:
184 rm -f makefile makefile.in POTFILES *.mo *.msg *.cat *.cat.m
