aboutsummaryrefslogtreecommitdiffstats
<
/* Portions are: Copyright (c) 1994 Linus Torvalds */

#ifndef __ASM_X86_PROCESSOR_H
#define __ASM_X86_PROCESSOR_H

#ifndef __ASSEMBLY__
#include <xen/config.h>
#include <xen/cache.h>
#include <xen/types.h>
#include <xen/smp.h>
#include <xen/percpu.h>
#include <public/xen.h>
#include <asm/types.h>
#include <asm/cpufeature.h>
#include <asm/desc.h>
#endif

/*
 * CPU vendor IDs
 *
 * Small dense integers (0..X86_VENDOR_NUM-1) identifying the CPU
 * manufacturer.  X86_VENDOR_NUM is the count of known vendors;
 * X86_VENDOR_UNKNOWN (0xff) is a sentinel outside that range.
 */
#define X86_VENDOR_INTEL 0
#define X86_VENDOR_CYRIX 1
#define X86_VENDOR_AMD 2
#define X86_VENDOR_UMC 3
#define X86_VENDOR_NEXGEN 4
#define X86_VENDOR_CENTAUR 5
#define X86_VENDOR_RISE 6
#define X86_VENDOR_TRANSMETA 7
#define X86_VENDOR_NSC 8
#define X86_VENDOR_NUM 9
#define X86_VENDOR_UNKNOWN 0xff

/*
 * EFLAGS bits
 *
 * Architectural bit positions in the x86 EFLAGS register.
 * (List continues below; the visible chunk is truncated.)
 */
#define X86_EFLAGS_CF	0x00000001 /* Carry Flag */
#define X86_EFLAGS_PF	0x00000004 /* Parity Flag */
#define X86_EFLAGS_AF	0x00000010 /* Auxiliary carry Flag */
#define X86_EFLAGS_ZF	0x00000040 /* Zero Flag */
#define X86_EFLAGS_SF	0x00000080 /* Sign Flag */
#defi
Commit message (Collapse)AuthorAgeFilesLines
* abc: Improve name recoveryDavid Shah2019-05-041-4/+17
| | | | Signed-off-by: David Shah <dave@ds0.me>
* Improve opt_clean handling of unused wiresClifford Wolf2019-05-041-10/+22
| | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
* Add support for SVA "final" keywordClifford Wolf2019-05-042-1/+5
| | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
* Rename cells_map.v to prevent clash with ff_map.vEddie Hung2019-05-031-6/+8
|
* iverilog with simcells.v as wellEddie Hung2019-05-031-1/+2
|
* Merge pull request #969 from YosysHQ/clifford/pmgenstuffClifford Wolf2019-05-0313-151/+509
|\ | | | | Improve pmgen, Add "peepopt" pass with shift-mul pattern
| * Update pmgen documentationClifford Wolf2019-05-031-6/+18
| | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
| * Fix typoClifford Wolf2019-05-031-1/+1
| | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
| * Add peepopt_muldiv, fixes #930Clifford Wolf2019-04-306-1/+86
| | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
| * pmgen progressClifford Wolf2019-04-304-13/+27
| | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
| * Run "peepopt" in generic "synth" pass and "synth_ice40"Clifford Wolf2019-04-302-0/+4
| | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
| * Some pmgen reorg, rename peepopt.pmg to peepopt_shiftmul.pmgClifford Wolf2019-04-303-4/+6
| | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
| * Progress in shiftmul peepopt patternClifford Wolf2019-04-301-3/+51
| | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
| * Add "peepopt" skeletonClifford Wolf2019-04-295-1/+112
| | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
| * Add pmgen support for multiple patterns in one matcherClifford Wolf2019-04-293-130/+188
| | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
| * Support multiple pmg files (right now just concatenated together)Clifford Wolf2019-04-291-6/+30
| | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
* | Merge pull request #984 from YosysHQ/eddie/fix_982Clifford Wolf2019-05-031-1/+2
|\ \ | | | | | | dffinit to do nothing when (* init *) value is 1'bx
| * | Revert "synth_xilinx to call dffinit with -noreinit"Eddie Hung2019-05-031-1/+1
| | | | | | | | | | | | This reverts commit 1f62dc9081feb4852b1848d01951f631853edb38.
| * | If init is 1'bx, do not add to dict as per @cliffordwolfEddie Hung2019-05-031-1/+2
| | |
| * | Revert "dffinit -noreinit to silently continue when init value is 1'bx"Eddie Hung2019-05-031-12/+4
| | | | | | | | | | | | This reverts commit aa081f83c791b1d666214776aaf744a80ce6a690.
| * | synth_xilinx to call dffinit with -noreinitEddie Hung2019-05-021-1/+1
| | |
| * | dffinit -noreinit to silently continue when init value is 1'bxEddie Hung2019-05-021-4/+12
| | |
* | | Merge pull request #976 from YosysHQ/clifford/fix974Clifford Wolf2019-05-033-0/+25
|\ \ \ | | | | | | | | Fix width detection of memory access with bit slice
| * | | Add splitcmplxassign test case and silence splitcmplxassign warningClifford Wolf2019-05-012-0/+23
| | | | | | | | | | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
| * | | Fix width detection of memory access with bit slice, fixes #974Clifford Wolf2019-05-011-0/+2
| | | | | | | | | | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at> 22-190/+286
| |\ \ \
| * | | | WIPEddie Hung2019-04-281-36/+22
| | | | |
| * | | | Move neg-pol to pos-pol mapping from ff_map to cells_map.vEddie Hung2019-04-282-9/+12
| | | | |
| * | | | Revert synth_xilinx 'fine' label more to how it used to be...Eddie Hung2019-04-261-21/+40
| | | | |
* | | | | Merge pull request #978 from ucb-bar/fmtfirrtlEddie Hung2019-05-011-25/+25
|\ \ \ \ \ | |_|/ / / |/| | | | Re-indent firrtl.cc:struct memory - no functional change.
| * | | | Re-indent firrtl.cc:struct memory - no functional change.Jim Lawson2019-05-011-25/+25
| | | | |
* | | | | Merge branch 'master' of github.com:YosysHQ/yosysEddie Hung2019-05-0121-176/+273
|\| | | |
| * | | | Merge branch 'clifford/fix883'Clifford Wolf2019-05-021-0/+1
| |\ \ \ \
| | * | | | Add missing enable_undef to "sat -tempinduct-def", fixes #883Clifford Wolf2019-05-021-0/+1
| |/ / / / | | | | | | | | | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
| * | | | Merge pull request #977 from ucb-bar/fixfirrtlmemClifford Wolf2019-05-013-4/+64
| |\ \ \ \ | | | | | | | | | | | | Fix #938 - Crash occurs in case when use write_firrtl command
| | * | | | Fix #938 - Crash occurs in case when use write_firrtl commandJim Lawson2019-05-013-4/+64
| | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | Add missing memory initialization. Sanity-check memory parameters. Add Cell pointer to memory object (for error reporting).
| * | | | | Fix floating point exception in qwp, fixes #923Clifford Wolf2019-05-011-1/+1
| | |_|/ / | |/| | | | | | | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
| * | | | Fix segfault in wreduceClifford Wolf2019-04-301-0/+2
| |/ / / | | | | | | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
| * | | Disabled "final loop assignment" featureClifford Wolf2019-04-301-0/+2
| | | | | | | | | | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
| * | | Merge pull request #972 from YosysHQ/clifford/fix968Clifford Wolf2019-04-301-0/+7
| |\ \ \ | | | | | | | | | | Add final loop variable assignment when unrolling for-loops
| | * | | Add final loop variable assignment when unrolling for-loops, fixes #968Clifford Wolf2019-04-301-0/+7
| | | | | | | | | | | | | | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
| * | | | Merge pull request #966 from YosysHQ/clifford/fix956Clifford Wolf2019-04-303-3/+55
| |\ \ \ \ | | | | | | | | | | | | Drive dangling wires with init attr with their init value
| | * | | | Add handling of init attributes in "opt_expr -undriven"Clifford Wolf2019-04-302-3/+42
| | | | | | | | | | | | | | | | | | | | | | | | Signed-off-by: Clifford Wolf <clifford@clifford.at>
/*
 * NOTE(review): this region reached us collapsed onto three physical lines
 * by the text extraction.  Below, only line breaks, indentation and
 * comments have been restored/added; every code token is unchanged.
 *
 * NOTE(review): scrape corruption -- the opening of the function the first
 * fragment belongs to (by its shape, a cpuid helper returning ECX for
 * leaf 'op') is missing from the visible chunk; the stray ">;" is part of
 * that corruption and is preserved verbatim.
 */
>; asm ( "cpuid"
         : "=a" (eax), "=c" (ecx)
         : "0" (op)
         : "bx", "dx" );
    return ecx;
}

/* Execute CPUID with EAX = 'op' and return the resulting EDX. */
static always_inline unsigned int cpuid_edx(unsigned int op)
{
    unsigned int eax, edx;
    asm ( "cpuid"
          : "=a" (eax), "=d" (edx)
          : "0" (op)
          : "bx", "cx" );
    return edx;
}

/* Read the current value of CR0. */
static inline unsigned long read_cr0(void)
{
    unsigned long cr0;
    asm volatile ( "mov %%cr0,%0\n\t" : "=r" (cr0) );
    return cr0;
}

/* Load 'val' into CR0. */
static inline void write_cr0(unsigned long val)
{
    asm volatile ( "mov %0,%%cr0" : : "r" ((unsigned long)val) );
}

/* Read CR2 (the architectural page-fault linear-address register). */
static inline unsigned long read_cr2(void)
{
    unsigned long cr2;
    asm volatile ( "mov %%cr2,%0\n\t" : "=r" (cr2) );
    return cr2;
}

/* Per-CPU software shadow of CR4; kept in sync by write_cr4() below. */
DECLARE_PER_CPU(unsigned long, cr4);

/* Return the cached per-CPU copy of CR4 (no hardware access). */
static inline unsigned long read_cr4(void)
{
    return this_cpu(cr4);
}

/* Update the per-CPU CR4 shadow, then load 'val' into the hardware CR4. */
static inline void write_cr4(unsigned long val)
{
    this_cpu(cr4) = val;
    asm volatile ( "mov %0,%%cr4" : : "r" (val) );
}

/* Clear and set 'TS' bit respectively */
static inline void clts(void)
{
    asm volatile ( "clts" );
}

static inline void stts(void)
{
    write_cr0(X86_CR0_TS|read_cr0());
}

/*
 * Save the cr4 feature set we're using (ie
 * Pentium 4MB enable and PPro Global page
 * enable), so that any CPU's that boot up
 * after us can get the correct flags.
 */
extern unsigned long mmu_cr4_features;

/* Set 'mask' bits in both mmu_cr4_features and the live CR4. */
static always_inline void set_in_cr4 (unsigned long mask)
{
    mmu_cr4_features |= mask;
    write_cr4(read_cr4() | mask);
}

/* Clear 'mask' bits in both mmu_cr4_features and the live CR4. */
static always_inline void clear_in_cr4 (unsigned long mask)
{
    mmu_cr4_features &= ~mask;
    write_cr4(read_cr4() & ~mask);
}

/*
 * NSC/Cyrix CPU configuration register indexes
 */
#define CX86_PCR0 0x20
#define CX86_GCR 0xb8
#define CX86_CCR0 0xc0
#define CX86_CCR1 0xc1
#define CX86_CCR2 0xc2
#define CX86_CCR3 0xc3
#define CX86_CCR4 0xe8
#define CX86_CCR5 0xe9
#define CX86_CCR6 0xea
#define CX86_CCR7 0xeb
#define CX86_PCR1 0xf0
#define CX86_DIR0 0xfe
#define CX86_DIR1 0xff
#define CX86_ARR_BASE 0xc4
#define CX86_RCR_BASE 0xdc

/*
 * NSC/Cyrix CPU indexed register access macros
 * (index written to port 0x22, data accessed via port 0x23)
 */
#define getCx86(reg) ({ outb((reg), 0x22); inb(0x23); })

#define setCx86(reg, data) do { \
    outb((reg), 0x22); \
    outb((data), 0x23); \
} while (0)

/* Stop speculative execution */
static inline void sync_core(void)
{
    int tmp;
    asm volatile ( "cpuid" : "=a" (tmp) : "0" (1) : "ebx","ecx","edx","memory" );
}

/* MONITOR instruction, emitted as raw opcode bytes. */
static always_inline void __monitor(const void *eax, unsigned long ecx,
                                    unsigned long edx)
{
    /* "monitor %eax,%ecx,%edx;" */
    asm volatile ( ".byte 0x0f,0x01,0xc8;" : : "a" (eax), "c" (ecx), "d"(edx) );
}

/* MWAIT instruction, emitted as raw opcode bytes. */
static always_inline void __mwait(unsigned long eax, unsigned long ecx)
{
    /* "mwait %eax,%ecx;" */
    asm volatile ( ".byte 0x0f,0x01,0xc9;" : : "a" (eax), "c" (ecx) );
}

#define IOBMP_BYTES 8192
#define IOBMP_INVALID_OFFSET 0x8000

/* Hardware task-state segment layout (64-bit and 32-bit variants). */
struct tss_struct {
    unsigned short back_link,__blh;
#ifdef __x86_64__
    union { u64 rsp0, esp0; };
    union { u64 rsp1, esp1; };
    union { u64 rsp2, esp2; };
    u64 reserved1;
    u64 ist[7];
    u64 reserved2;
    u16 reserved3;
#else
    u32 esp0;
    u16 ss0,__ss0h;
    u32 esp1;
    u16 ss1,__ss1h;
    u32 esp2;
    u16 ss2,__ss2h;
    u32 __cr3;
    u32 eip;
    u32 eflags;
    u32 eax,ecx,edx,ebx;
    u32 esp;
    u32 ebp;
    u32 esi;
    u32 edi;
    u16 es, __esh;
    u16 cs, __csh;
    u16 ss, __ssh;
    u16 ds, __dsh;
    u16 fs, __fsh;
    u16 gs, __gsh;
    u16 ldt, __ldth;
    u16 trace;
#endif
    u16 bitmap;
    /* Pads the TSS to be cacheline-aligned (total size is 0x80). */
    u8 __cacheline_filler[24];
} __cacheline_aligned __attribute__((packed));

#ifdef __x86_64__
/* Interrupt Stack Table slot assignments. */
# define IST_DF 1UL
# define IST_NMI 2UL
# define IST_MCE 3UL
# define IST_MAX 3UL
#endif

#define IDT_ENTRIES 256
extern idt_entry_t idt_table[];
extern idt_entry_t *idt_tables[];

extern struct tss_struct init_tss[NR_CPUS];

extern void init_int80_direct_trap(struct vcpu *v);
#if defined(CONFIG_X86_32)
/* Copy the vcpu's int80 descriptor into slot 0x80 of its CPU's IDT. */
#define set_int80_direct_trap(_ed) \
    (memcpy(idt_tables[(_ed)->processor] + 0x80, \
            &((_ed)->arch.int80_desc), 8))
#else
#define set_int80_direct_trap(_ed) ((void)0)
#endif

extern int gpf_emulate_4gb(struct cpu_user_regs *regs);

extern void write_ptbase(struct vcpu *v);

void destroy_gdt(struct vcpu *d);
long set_gdt(struct vcpu *d,
             unsigned long *frames,
             unsigned int entries);

/* Write 'val' to hardware debug register DB<reg> (reg is token-pasted). */
#define write_debugreg(reg, val) do { \
    unsigned long __val = val; \
    asm volatile ( "mov %0,%%db" #reg : : "r" (__val) ); \
} while (0)
/* Read hardware debug register DB<reg> (reg is token-pasted). */
#define read_debugreg(reg) ({ \
    unsigned long __val; \
    asm volatile ( "mov %%db" #reg ",%0" : "=r" (__val) ); \
    __val; \
})
long set_debugreg(struct vcpu *p, int reg, unsigned long value);

/* REP NOP (PAUSE) is a good thing to insert into busy-wait loops. */
/* NOTE(review): the body of rep_nop() continues beyond the visible chunk. */
static always_inline void rep_nop(void)