/* SPDX-License-Identifier: GPL-2.0 */
/*
 * x86 TSC related functions
 */
#ifndef _ASM_X86_TSC_H
#define _ASM_X86_TSC_H

#include <asm/processor.h>

#define NS_SCALE	10	/* 2^10, carefully chosen */
#define US_SCALE	32	/* 2^32, arbitrarily chosen */

/*
 * Standard way to access the cycle counter.
 */
typedef unsigned long long cycles_t;

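/*
 * CPU and TSC frequencies in kHz, as established by boot-time
 * calibration.
 */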
extern unsigned int cpu_khz;
extern unsigned int tsc_khz;

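/* Disable user-space RDTSC access for the current task (via CR4.TSD). */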
extern void disable_TSC(void);

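/*
 * Read the current TSC value. When CONFIG_X86_TSC is not set the TSC
 * is not guaranteed to exist, so return 0 on CPUs without the feature.
 */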
static inline cycles_t get_cycles(void)
{
#ifndef CONFIG_X86_TSC
	if (!boot_cpu_has(X86_FEATURE_TSC))
		return 0;
#endif

	return rdtsc();
}

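/*
 * Convert Always Running Timer (ART) values to the corresponding TSC
 * value, used for hardware cross-timestamping.
 */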
extern struct system_counterval_t convert_art_to_tsc(u64 art);
extern struct system_counterval_t convert_art_ns_to_tsc(u64 art_ns);

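/*
 * Early and full TSC initialization. calibrate_delay_is_known() allows
 * the delay-loop calibration to be skipped when the TSC frequency is
 * already known.
 */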
extern void tsc_early_init(void);
extern void tsc_init(void);
extern unsigned long calibrate_delay_is_known(void);
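/* Mark the TSC unstable (the reason is reported) and query its state. */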
extern void mark_tsc_unstable(char *reason);
extern int unsynchronized_tsc(void);
extern int check_tsc_unstable(void);
extern void mark_tsc_async_resets(char *reason);
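/*
 * Native calibration routines and conversion of a raw TSC value to a
 * sched_clock() timestamp in nanoseconds.
 */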
extern unsigned long native_calibrate_cpu_early(void);
extern unsigned long native_calibrate_tsc(void);
extern unsigned long long native_sched_clock_from_tsc(u64 tsc);

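/*
 * tsc_clocksource_reliable is set when the TSC is trusted as a
 * clocksource. tsc_async_resets is set (via mark_tsc_async_resets())
 * on platforms where the TSC may be reset outside of the kernel's
 * control.
 */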
extern int tsc_clocksource_reliable;
#ifdef CONFIG_X86_TSC
extern bool tsc_async_resets;
#else
# define tsc_async_resets	false
#endif

/*
 * Boot-time check whether the TSCs are synchronized across
 * all CPUs/cores:
 */
#ifdef CONFIG_X86_TSC
extern bool tsc_store_and_check_tsc_adjust(bool bootcpu);
extern void tsc_verify_tsc_adjust(bool resume);
extern void check_tsc_sync_source(int cpu);
extern void check_tsc_sync_target(void);
#else
static inline bool tsc_store_and_check_tsc_adjust(bool bootcpu) { return false; }
static inline void tsc_verify_tsc_adjust(bool resume) { }
static inline void check_tsc_sync_source(int cpu) { }
static inline void check_tsc_sync_target(void) { }
#endif

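/* "notsc" command line handling and sched_clock state across suspend/resume. */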
extern int notsc_setup(char *);
extern void tsc_save_sched_clock_state(void);
extern void tsc_restore_sched_clock_state(void);

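/* Determine the CPU frequency from MSRs on platforms that provide it there. */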
unsigned long cpu_khz_from_msr(void);

#endif /* _ASM_X86_TSC_H */