path: root/arch/s390/include/asm/switch_to.h
/*
 * Copyright IBM Corp. 1999, 2009
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */

#ifndef __ASM_SWITCH_TO_H
#define __ASM_SWITCH_TO_H

#include <linux/thread_info.h>
#include <asm/ptrace.h>

extern struct task_struct *__switch_to(void *, void *);
extern void update_cr_regs(struct task_struct *task);

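/*
 * Check whether fpc is a valid floating-point-control value.  The
 * candidate value is loaded into the FPC register; an invalid value
 * raises a specification exception which the EX_TABLE fixup turns
 * into -EINVAL.  The previous FPC contents are preserved.
 * Returns 0 if fpc is valid, -EINVAL otherwise.
 */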
static inline int test_fp_ctl(u32 fpc)
{
	u32 orig_fpc;
	int rc;

	if (!MACHINE_HAS_IEEE)
		return 0;

	asm volatile(
		"	efpc    %1\n"
		"	sfpc	%2\n"
		"0:	sfpc	%1\n"
		"	la	%0,0\n"
		"1:\n"
		EX_TABLE(0b,1b)
		: "=d" (rc), "=d" (orig_fpc)
		: "d" (fpc), "0" (-EINVAL));
	return rc;
}

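/* Store the current floating-point-control register into *fpc. */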
static inline void save_fp_ctl(u32 *fpc)
{
	if (!MACHINE_HAS_IEEE)
		return;

	asm volatile(
		"	stfpc	%0\n"
		: "+Q" (*fpc));
}

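/*
 * Load the floating-point-control register from *fpc.
 * Returns 0 on success, -EINVAL if the saved value is not a valid FPC.
 */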
static inline int restore_fp_ctl(u32 *fpc)
{
	int rc;

	if (!MACHINE_HAS_IEEE)
		return 0;

	asm volatile(
		"0:	lfpc    %1\n"
		"	la	%0,0\n"
		"1:\n"
		EX_TABLE(0b,1b)
		: "=d" (rc) : "Q" (*fpc), "0" (-EINVAL));
	return rc;
}

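/*
 * Store all floating point registers.  Machines without MACHINE_HAS_IEEE
 * provide only the four original registers f0, f2, f4 and f6; the
 * additional twelve registers are saved only if the facility is present.
 */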
static inline void save_fp_regs(freg_t *fprs)
{
	asm volatile("std 0,%0" : "=Q" (fprs[0]));
	asm volatile("std 2,%0" : "=Q" (fprs[2]));
	asm volatile("std 4,%0" : "=Q" (fprs[4]));
	asm volatile("std 6,%0" : "=Q" (fprs[6]));
	if (!MACHINE_HAS_IEEE)
		return;
	asm volatile("std 1,%0" : "=Q" (fprs[1]));
	asm volatile("std 3,%0" : "=Q" (fprs[3]));
	asm volatile("std 5,%0" : "=Q" (fprs[5]));
	asm volatile("std 7,%0" : "=Q" (fprs[7]));
	asm volatile("std 8,%0" : "=Q" (fprs[8]));
	asm volatile("std 9,%0" : "=Q" (fprs[9]));
	asm volatile("std 10,%0" : "=Q" (fprs[10]));
	asm volatile("std 11,%0" : "=Q" (fprs[11]));
	asm volatile("std 12,%0" : "=Q" (fprs[12]));
	asm volatile("std 13,%0" : "=Q" (fprs[13]));
	asm volatile("std 14,%0" : "=Q" (fprs[14]));
	asm volatile("std 15,%0" : "=Q" (fprs[15]));
}

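/* Counterpart of save_fp_regs: reload the floating point registers. */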
static inline void restore_fp_regs(freg_t *fprs)
{
	asm volatile("ld 0,%0" : : "Q" (fprs[0]));
	asm volatile("ld 2,%0" : : "Q" (fprs[2]));
	asm volatile("ld 4,%0" : : "Q" (fprs[4]));
	asm volatile("ld 6,%0" : : "Q" (fprs[6]));
	if (!MACHINE_HAS_IEEE)
		return;
	asm volatile("ld 1,%0" : : "Q" (fprs[1]));
	asm volatile("ld 3,%0" : : "Q" (fprs[3]));
	asm volatile("ld 5,%0" : : "Q" (fprs[5]));
	asm volatile("ld 7,%0" : : "Q" (fprs[7]));
	asm volatile("ld 8,%0" : : "Q" (fprs[8]));
	asm volatile("ld 9,%0" : : "Q" (fprs[9]));
	asm volatile("ld 10,%0" : : "Q" (fprs[10]));
	asm volatile("ld 11,%0" : : "Q" (fprs[11]));
	asm volatile("ld 12,%0" : : "Q" (fprs[12]));
	asm volatile("ld 13,%0" : : "Q" (fprs[13]));
	asm volatile("ld 14,%0" : : "Q" (fprs[14]));
	asm volatile("ld 15,%0" : : "Q" (fprs[15]));
}

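/*
 * Store access registers a0-a15.  The acrstype cast tells the compiler
 * that the asm writes the whole NUM_ACRS-word array.
 */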
static inline void save_access_regs(unsigned int *acrs)
{
	typedef struct { int _[NUM_ACRS]; } acrstype;

	asm volatile("stam 0,15,%0" : "=Q" (*(acrstype *)acrs));
}

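/* Reload access registers a0-a15 from the array at acrs. */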
static inline void restore_access_regs(unsigned int *acrs)
{
	typedef struct { int _[NUM_ACRS]; } acrstype;

	asm volatile("lam 0,15,%0" : : "Q" (*(acrstype *)acrs));
}

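/*
 * Task switch: save the floating point, access register and runtime
 * instrumentation state of prev and load the corresponding state of
 * next, then let __switch_to swap the remaining CPU state.  Kernel
 * threads (tasks without an mm) have no user context to save or
 * restore.  update_primary_asce() switches the primary address space
 * to that of the task that is now running.
 */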
#define switch_to(prev,next,last) do {					\
	if (prev->mm) {							\
		save_fp_ctl(&prev->thread.fp_regs.fpc);			\
		save_fp_regs(prev->thread.fp_regs.fprs);		\
		save_access_regs(&prev->thread.acrs[0]);		\
		save_ri_cb(prev->thread.ri_cb);				\
	}								\
	if (next->mm) {							\
		restore_fp_ctl(&next->thread.fp_regs.fpc);		\
		restore_fp_regs(next->thread.fp_regs.fprs);		\
		restore_access_regs(&next->thread.acrs[0]);		\
		restore_ri_cb(next->thread.ri_cb, prev->thread.ri_cb);	\
		update_cr_regs(next);					\
	}								\
	prev = __switch_to(prev,next);					\
	update_primary_asce(current);					\
} while (0)

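/* Re-establish the saved address space mode (mm_segment) of the new task. */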
#define finish_arch_switch(prev) do {					     \
	set_fs(current->thread.mm_segment);				     \
} while (0)

#endif /* __ASM_SWITCH_TO_H */