drm/radeon/kms: simplify memory controller setup V2
drivers/gpu/drm/radeon/evergreen.c
/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <linux/platform_device.h>
#include "drmP.h"
#include "radeon.h"
#include "radeon_drm.h"
#include "rv770d.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);

bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
        bool connected = false;
        /* XXX */
        return connected;
}

void evergreen_hpd_set_polarity(struct radeon_device *rdev,
                                enum radeon_hpd_id hpd)
{
        /* XXX */
}

void evergreen_hpd_init(struct radeon_device *rdev)
{
        /* XXX */
}

void evergreen_bandwidth_update(struct radeon_device *rdev)
{
        /* XXX */
}

void evergreen_hpd_fini(struct radeon_device *rdev)
{
        /* XXX */
}

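/*
 * Poll for the memory controller to go idle (the MC busy status is
 * reported through SRBM_STATUS on evergreen) or until rdev->usec_timeout
 * expires.
 */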
static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
        unsigned i;
        u32 tmp;

        for (i = 0; i < rdev->usec_timeout; i++) {
                /* read the MC busy bits from SRBM_STATUS */
                tmp = RREG32(SRBM_STATUS) & 0x1F00;
                if (!tmp)
                        return 0;
                udelay(1);
        }
        return -1;
}

/*
 * GART
 */
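/*
 * Enable the PCIE GART: pin the page table in VRAM, program the L2 cache
 * and L1 TLBs, and enable VM context 0 over the GTT range with faults
 * redirected to the dummy page.
 */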
int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
        u32 tmp;
        int r, i;

        if (rdev->gart.table.vram.robj == NULL) {
                dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
                return -EINVAL;
        }
        r = radeon_gart_table_vram_pin(rdev);
        if (r)
                return r;
        radeon_gart_restore(rdev);
        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
                                ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
                                EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
                SYSTEM_ACCESS_MODE_NOT_IN_SYS |
                SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
                EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
        WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
        WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
        WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
                                RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
        WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
                        (u32)(rdev->dummy_page.addr >> 12));
        for (i = 1; i < 7; i++)
                WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

        r600_pcie_gart_tlb_flush(rdev);
        rdev->gart.ready = true;
        return 0;
}

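/*
 * Disable the PCIE GART again: turn off every VM context, drop the L1/L2
 * enables, and unpin the page table object if one was allocated.
 */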
void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
        u32 tmp;
        int i, r;

        /* Disable all tables */
        for (i = 0; i < 7; i++)
                WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
                                EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        if (rdev->gart.table.vram.robj) {
                r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
                if (likely(r == 0)) {
                        radeon_bo_kunmap(rdev->gart.table.vram.robj);
                        radeon_bo_unpin(rdev->gart.table.vram.robj);
                        radeon_bo_unreserve(rdev->gart.table.vram.robj);
                }
        }
}

void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
        evergreen_pcie_gart_disable(rdev);
        radeon_gart_table_vram_free(rdev);
        radeon_gart_fini(rdev);
}

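/*
 * AGP path: keep the L1 TLBs and L2 cache enabled in system-aperture
 * pass-through mode, but leave every VM context disabled.
 */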
void evergreen_agp_enable(struct radeon_device *rdev)
{
        u32 tmp;
        int i;

        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
                                ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
                                EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
                SYSTEM_ACCESS_MODE_NOT_IN_SYS |
                SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
                EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        for (i = 0; i < 7; i++)
                WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);
}

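/*
 * Save the current VGA and CRTC state and blank all six display
 * controllers so the memory controller can be reprogrammed without the
 * display block scanning out of VRAM.
 */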
static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
        save->vga_control[0] = RREG32(D1VGA_CONTROL);
        save->vga_control[1] = RREG32(D2VGA_CONTROL);
        save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
        save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
        save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
        save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
        save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
        save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
        save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
        save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
        save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
        save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
        save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
        save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);

        /* Stop all video */
        WREG32(VGA_RENDER_CONTROL, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

        WREG32(D1VGA_CONTROL, 0);
        WREG32(D2VGA_CONTROL, 0);
        WREG32(EVERGREEN_D3VGA_CONTROL, 0);
        WREG32(EVERGREEN_D4VGA_CONTROL, 0);
        WREG32(EVERGREEN_D5VGA_CONTROL, 0);
        WREG32(EVERGREEN_D6VGA_CONTROL, 0);
}

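/*
 * Point every CRTC scanout surface and the VGA memory aperture at the
 * (possibly relocated) start of VRAM, then restore the display state
 * saved by evergreen_mc_stop().
 */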
static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
        /* Unlock host access */
        WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
        mdelay(1);
        /* Restore video state */
        WREG32(D1VGA_CONTROL, save->vga_control[0]);
        WREG32(D2VGA_CONTROL, save->vga_control[1]);
        WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
        WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
        WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
        WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
}

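/*
 * Reprogram the memory controller: stop the displays, wait for the MC to
 * go idle, write the system aperture, FB location and AGP aperture, then
 * bring the displays back up.
 */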
static void evergreen_mc_program(struct radeon_device *rdev)
{
        struct evergreen_mc_save save;
        u32 tmp;
        int i, j;

        /* Initialize HDP */
        for (i = 0, j = 0; i < 32; i++, j += 0x18) {
                WREG32((0x2c14 + j), 0x00000000);
                WREG32((0x2c18 + j), 0x00000000);
                WREG32((0x2c1c + j), 0x00000000);
                WREG32((0x2c20 + j), 0x00000000);
                WREG32((0x2c24 + j), 0x00000000);
        }
        WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

        evergreen_mc_stop(rdev, &save);
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
        }
        /* Lockout access through VGA aperture */
        WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
        /* Update configuration */
        if (rdev->flags & RADEON_IS_AGP) {
                if (rdev->mc.vram_start < rdev->mc.gtt_start) {
                        /* VRAM before AGP */
                        WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                                rdev->mc.vram_start >> 12);
                        WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                                rdev->mc.gtt_end >> 12);
                } else {
                        /* VRAM after AGP */
                        WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                                rdev->mc.gtt_start >> 12);
                        WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                                rdev->mc.vram_end >> 12);
                }
        } else {
                WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                        rdev->mc.vram_start >> 12);
                WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                        rdev->mc.vram_end >> 12);
        }
        WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
        tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
        tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
        WREG32(MC_VM_FB_LOCATION, tmp);
        WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
        WREG32(HDP_NONSURFACE_INFO, (2 << 7));
        WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
        if (rdev->flags & RADEON_IS_AGP) {
                WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
                WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
                WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
        } else {
                WREG32(MC_VM_AGP_BASE, 0);
                WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
                WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
        }
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
        }
        evergreen_mc_resume(rdev, &save);
        /* we need to own VRAM, so turn off the VGA renderer here
         * to stop it overwriting our objects */
        rv515_vga_render_disable(rdev);
}

#if 0
/*
 * CP.
 */
static void evergreen_cp_stop(struct radeon_device *rdev)
{
        /* XXX */
}


static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
        /* XXX */

        return 0;
}


/*
 * Core functions
 */
static u32 evergreen_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
                                                  u32 num_backends,
                                                  u32 backend_disable_mask)
{
        u32 backend_map = 0;

        return backend_map;
}
#endif

static void evergreen_gpu_init(struct radeon_device *rdev)
{
        /* XXX */
}

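/*
 * Derive the VRAM bus width from the channel size and channel count,
 * read the VRAM size from CONFIG_MEMSIZE (reported in MB on evergreen)
 * and place the VRAM and GTT apertures in the GPU address space.
 */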
int evergreen_mc_init(struct radeon_device *rdev)
{
        fixed20_12 a;
        u32 tmp;
        int chansize, numchan;

        /* Get VRAM information */
        rdev->mc.vram_is_ddr = true;
        tmp = RREG32(MC_ARB_RAMCFG);
        if (tmp & CHANSIZE_OVERRIDE) {
                chansize = 16;
        } else if (tmp & CHANSIZE_MASK) {
                chansize = 64;
        } else {
                chansize = 32;
        }
        tmp = RREG32(MC_SHARED_CHMAP);
        switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
        case 0:
        default:
                numchan = 1;
                break;
        case 1:
                numchan = 2;
                break;
        case 2:
                numchan = 4;
                break;
        case 3:
                numchan = 8;
                break;
        }
        rdev->mc.vram_width = numchan * chansize;
        /* Could the aperture size report 0? */
        rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
        rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
        /* Setup GPU memory space */
        /* size in MB on evergreen */
        rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
        rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
        /* FIXME remove this once we support unmappable VRAM */
        if (rdev->mc.mc_vram_size > rdev->mc.aper_size) {
                rdev->mc.mc_vram_size = rdev->mc.aper_size;
                rdev->mc.real_vram_size = rdev->mc.aper_size;
        }
        r600_vram_gtt_location(rdev, &rdev->mc);
        /* FIXME: we should enforce default clock in case GPU is not in
         * default setup
         */
        a.full = rfixed_const(100);
        rdev->pm.sclk.full = rfixed_const(rdev->clock.default_sclk);
        rdev->pm.sclk.full = rfixed_div(rdev->pm.sclk, a);
        return 0;
}

int evergreen_gpu_reset(struct radeon_device *rdev)
{
        /* FIXME: implement for evergreen */
        return 0;
}

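/*
 * Common hardware bring-up used by both init and resume. Most of the
 * engine setup (firmware, GART/AGP, blitter, interrupts, CP) is still
 * stubbed out behind "#if 0"; only the MC programming and the gpu_init
 * stub are active for now.
 */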
static int evergreen_startup(struct radeon_device *rdev)
{
#if 0
        int r;

        if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
                r = r600_init_microcode(rdev);
                if (r) {
                        DRM_ERROR("Failed to load firmware!\n");
                        return r;
                }
        }
#endif
        evergreen_mc_program(rdev);
#if 0
        if (rdev->flags & RADEON_IS_AGP) {
                evergreen_agp_enable(rdev);
        } else {
                r = evergreen_pcie_gart_enable(rdev);
                if (r)
                        return r;
        }
#endif
        evergreen_gpu_init(rdev);
#if 0
        if (!rdev->r600_blit.shader_obj) {
                r = r600_blit_init(rdev);
                if (r) {
                        DRM_ERROR("radeon: failed blitter (%d).\n", r);
                        return r;
                }
        }

        r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
        if (unlikely(r != 0))
                return r;
        r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
                        &rdev->r600_blit.shader_gpu_addr);
        radeon_bo_unreserve(rdev->r600_blit.shader_obj);
        if (r) {
                DRM_ERROR("failed to pin blit object %d\n", r);
                return r;
        }

        /* Enable IRQ */
        r = r600_irq_init(rdev);
        if (r) {
                DRM_ERROR("radeon: IH init failed (%d).\n", r);
                radeon_irq_kms_fini(rdev);
                return r;
        }
        r600_irq_set(rdev);

        r = radeon_ring_init(rdev, rdev->cp.ring_size);
        if (r)
                return r;
        r = evergreen_cp_load_microcode(rdev);
        if (r)
                return r;
        r = r600_cp_resume(rdev);
        if (r)
                return r;
        /* write back buffers are not vital so don't worry about failure */
        r600_wb_enable(rdev);
#endif
        return 0;
}

int evergreen_resume(struct radeon_device *rdev)
{
        int r;

        /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
         * posting will perform the necessary tasks to bring the GPU back
         * into good shape.
         */
        /* post card */
        atom_asic_init(rdev->mode_info.atom_context);
        /* Initialize clocks */
        r = radeon_clocks_init(rdev);
        if (r) {
                return r;
        }

        r = evergreen_startup(rdev);
        if (r) {
                DRM_ERROR("evergreen startup failed on resume\n");
                return r;
        }
#if 0
        r = r600_ib_test(rdev);
        if (r) {
                DRM_ERROR("radeon: failed testing IB (%d).\n", r);
                return r;
        }
#endif
        return r;
}

int evergreen_suspend(struct radeon_device *rdev)
{
#if 0
        int r;

        /* FIXME: we should wait for ring to be empty */
        r700_cp_stop(rdev);
        rdev->cp.ready = false;
        r600_wb_disable(rdev);
        evergreen_pcie_gart_disable(rdev);
        /* unpin shaders bo */
        r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
        if (likely(r == 0)) {
                radeon_bo_unpin(rdev->r600_blit.shader_obj);
                radeon_bo_unreserve(rdev->r600_blit.shader_obj);
        }
#endif
        return 0;
}

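/*
 * A card counts as posted if any of the six CRTCs is enabled or if the
 * BIOS has already programmed a non-zero VRAM size into CONFIG_MEMSIZE.
 */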
static bool evergreen_card_posted(struct radeon_device *rdev)
{
        u32 reg;

        /* first check CRTCs */
        reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
                RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
                RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
                RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
                RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
                RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
        if (reg & EVERGREEN_CRTC_MASTER_EN)
                return true;

        /* then check MEM_SIZE, in case the crtcs are off */
        if (RREG32(CONFIG_MEMSIZE))
                return true;

        return false;
}

/* Plan is to move initialization into that function and use
 * helper functions so that radeon_device_init pretty much
 * does nothing more than call asic-specific functions. This
 * should also allow removal of a bunch of callback functions
 * like vram_info.
 */
int evergreen_init(struct radeon_device *rdev)
{
        int r;

        r = radeon_dummy_page_init(rdev);
        if (r)
                return r;
        /* This doesn't do much */
        r = radeon_gem_init(rdev);
        if (r)
                return r;
        /* Read BIOS */
        if (!radeon_get_bios(rdev)) {
                if (ASIC_IS_AVIVO(rdev))
                        return -EINVAL;
        }
        /* Must be an ATOMBIOS */
        if (!rdev->is_atom_bios) {
                dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
                return -EINVAL;
        }
        r = radeon_atombios_init(rdev);
        if (r)
                return r;
        /* Post card if necessary */
        if (!evergreen_card_posted(rdev)) {
                if (!rdev->bios) {
                        dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
                        return -EINVAL;
                }
                DRM_INFO("GPU not posted. posting now...\n");
                atom_asic_init(rdev->mode_info.atom_context);
        }
        /* Initialize scratch registers */
        r600_scratch_init(rdev);
        /* Initialize surface registers */
        radeon_surface_init(rdev);
        /* Initialize clocks */
        radeon_get_clock_info(rdev->ddev);
        r = radeon_clocks_init(rdev);
        if (r)
                return r;
        /* Initialize power management */
        radeon_pm_init(rdev);
        /* Fence driver */
        r = radeon_fence_driver_init(rdev);
        if (r)
                return r;
        /* initialize AGP */
        if (rdev->flags & RADEON_IS_AGP) {
                r = radeon_agp_init(rdev);
                if (r)
                        radeon_agp_disable(rdev);
        }
        /* initialize memory controller */
        r = evergreen_mc_init(rdev);
        if (r)
                return r;
        /* Memory manager */
        r = radeon_bo_init(rdev);
        if (r)
                return r;
#if 0
        r = radeon_irq_kms_init(rdev);
        if (r)
                return r;

        rdev->cp.ring_obj = NULL;
        r600_ring_init(rdev, 1024 * 1024);

        rdev->ih.ring_obj = NULL;
        r600_ih_ring_init(rdev, 64 * 1024);

        r = r600_pcie_gart_init(rdev);
        if (r)
                return r;
#endif
        rdev->accel_working = false;
        r = evergreen_startup(rdev);
        if (r) {
                evergreen_suspend(rdev);
                /*r600_wb_fini(rdev);*/
                /*radeon_ring_fini(rdev);*/
                /*evergreen_pcie_gart_fini(rdev);*/
                rdev->accel_working = false;
        }
        if (rdev->accel_working) {
                r = radeon_ib_pool_init(rdev);
                if (r) {
                        DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
                        rdev->accel_working = false;
                }
                r = r600_ib_test(rdev);
                if (r) {
                        DRM_ERROR("radeon: failed testing IB (%d).\n", r);
                        rdev->accel_working = false;
                }
        }
        return 0;
}

void evergreen_fini(struct radeon_device *rdev)
{
        evergreen_suspend(rdev);
#if 0
        r600_blit_fini(rdev);
        r600_irq_fini(rdev);
        radeon_irq_kms_fini(rdev);
        radeon_ring_fini(rdev);
        r600_wb_fini(rdev);
        evergreen_pcie_gart_fini(rdev);
#endif
        radeon_gem_fini(rdev);
        radeon_fence_driver_fini(rdev);
        radeon_clocks_fini(rdev);
        radeon_agp_fini(rdev);
        radeon_bo_fini(rdev);
        radeon_atombios_fini(rdev);
        kfree(rdev->bios);
        rdev->bios = NULL;
        radeon_dummy_page_fini(rdev);
}