57fe569682df8918261e3edd40578ea63b88c48e
[safe/jmp/linux-2.6] / drivers / gpu / drm / radeon / evergreen.c
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/platform_device.h>
26 #include "drmP.h"
27 #include "radeon.h"
28 #include "radeon_asic.h"
29 #include "radeon_drm.h"
30 #include "evergreend.h"
31 #include "atom.h"
32 #include "avivod.h"
33 #include "evergreen_reg.h"
34
35 #define EVERGREEN_PFP_UCODE_SIZE 1120
36 #define EVERGREEN_PM4_UCODE_SIZE 1376
37
38 static void evergreen_gpu_init(struct radeon_device *rdev);
39 void evergreen_fini(struct radeon_device *rdev);
40
41 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
42 {
43         bool connected = false;
44         /* XXX */
45         return connected;
46 }
47
/* Set the hotplug-detect interrupt polarity for the given HPD pin.
 * XXX: not implemented for evergreen yet — this is a stub. */
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd)
{
	/* XXX */
}
53
/* Initialize hotplug-detect hardware for all connectors.
 * XXX: not implemented for evergreen yet — this is a stub. */
void evergreen_hpd_init(struct radeon_device *rdev)
{
	/* XXX */
}
58
59
/* Recompute display watermarks/bandwidth settings after a mode change.
 * XXX: not implemented for evergreen yet — this is a stub. */
void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	/* XXX */
}
64
/* Tear down hotplug-detect hardware state.
 * XXX: not implemented for evergreen yet — this is a stub. */
void evergreen_hpd_fini(struct radeon_device *rdev)
{
	/* XXX */
}
69
/* Poll until the memory controller reports idle.
 * Returns 0 once the busy bits clear, -1 if rdev->usec_timeout
 * microseconds elapse first. */
static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* MC busy status lives in bits 8-12 of SRBM_STATUS */
		tmp = RREG32(SRBM_STATUS) & 0x1F00;
		if (!tmp)
			return 0;
		udelay(1);
	}
	return -1;
}
84
85 /*
86  * GART
87  */
88 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
89 {
90         unsigned i;
91         u32 tmp;
92
93         WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
94         for (i = 0; i < rdev->usec_timeout; i++) {
95                 /* read MC_STATUS */
96                 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
97                 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
98                 if (tmp == 2) {
99                         printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
100                         return;
101                 }
102                 if (tmp) {
103                         return;
104                 }
105                 udelay(1);
106         }
107 }
108
/* Enable the PCIE GART: pin the page table in VRAM, program the L2
 * cache and L1 TLBs, set up VM context 0 to cover the GTT aperture,
 * and flush the TLB. Returns 0 on success or a negative error code. */
int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.table.vram.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* re-populate the table with any entries saved across suspend */
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control — same value is mirrored to every MD/MB L1 TLB */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* context 0 covers the GTT range; addresses are in 4K pages (>> 12) */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* faulting accesses are redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	rdev->gart.ready = true;
	return 0;
}
153
/* Disable the PCIE GART: turn off both VM contexts, leave the L2/L1
 * caches in a safe pass-through configuration, then unpin and unmap
 * the page-table BO if it exists. */
void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache — note ENABLE_L2_CACHE is deliberately NOT set */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control — L1 TLBs disabled (no ENABLE_L1_TLB bit) */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	if (rdev->gart.table.vram.robj) {
		r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
		if (likely(r == 0)) {
			radeon_bo_kunmap(rdev->gart.table.vram.robj);
			radeon_bo_unpin(rdev->gart.table.vram.robj);
			radeon_bo_unreserve(rdev->gart.table.vram.robj);
		}
	}
}
186
/* Full GART teardown: disable the hardware paths first, then free the
 * VRAM page table and the generic GART bookkeeping (order matters). */
void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
193
194
/* Configure the VM/L2/TLB blocks for AGP operation: caches and TLBs are
 * enabled for system access pass-through, but both page-table contexts
 * are left disabled (AGP does its own translation). */
void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control — same value mirrored to every MD/MB L1 TLB */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no VM page tables in AGP mode */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
220
221 static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
222 {
223         save->vga_control[0] = RREG32(D1VGA_CONTROL);
224         save->vga_control[1] = RREG32(D2VGA_CONTROL);
225         save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
226         save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
227         save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
228         save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
229         save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
230         save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
231         save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
232         save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
233         save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
234         save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
235         save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
236         save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
237
238         /* Stop all video */
239         WREG32(VGA_RENDER_CONTROL, 0);
240         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
241         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
242         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
243         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
244         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
245         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
246         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
247         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
248         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
249         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
250         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
251         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
252         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
253         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
254         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
255         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
256         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
257         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
258
259         WREG32(D1VGA_CONTROL, 0);
260         WREG32(D2VGA_CONTROL, 0);
261         WREG32(EVERGREEN_D3VGA_CONTROL, 0);
262         WREG32(EVERGREEN_D4VGA_CONTROL, 0);
263         WREG32(EVERGREEN_D5VGA_CONTROL, 0);
264         WREG32(EVERGREEN_D6VGA_CONTROL, 0);
265 }
266
267 static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
268 {
269         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
270                upper_32_bits(rdev->mc.vram_start));
271         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
272                upper_32_bits(rdev->mc.vram_start));
273         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
274                (u32)rdev->mc.vram_start);
275         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
276                (u32)rdev->mc.vram_start);
277
278         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
279                upper_32_bits(rdev->mc.vram_start));
280         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
281                upper_32_bits(rdev->mc.vram_start));
282         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
283                (u32)rdev->mc.vram_start);
284         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
285                (u32)rdev->mc.vram_start);
286
287         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
288                upper_32_bits(rdev->mc.vram_start));
289         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
290                upper_32_bits(rdev->mc.vram_start));
291         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
292                (u32)rdev->mc.vram_start);
293         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
294                (u32)rdev->mc.vram_start);
295
296         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
297                upper_32_bits(rdev->mc.vram_start));
298         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
299                upper_32_bits(rdev->mc.vram_start));
300         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
301                (u32)rdev->mc.vram_start);
302         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
303                (u32)rdev->mc.vram_start);
304
305         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
306                upper_32_bits(rdev->mc.vram_start));
307         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
308                upper_32_bits(rdev->mc.vram_start));
309         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
310                (u32)rdev->mc.vram_start);
311         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
312                (u32)rdev->mc.vram_start);
313
314         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
315                upper_32_bits(rdev->mc.vram_start));
316         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
317                upper_32_bits(rdev->mc.vram_start));
318         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
319                (u32)rdev->mc.vram_start);
320         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
321                (u32)rdev->mc.vram_start);
322
323         WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
324         WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
325         /* Unlock host access */
326         WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
327         mdelay(1);
328         /* Restore video state */
329         WREG32(D1VGA_CONTROL, save->vga_control[0]);
330         WREG32(D2VGA_CONTROL, save->vga_control[1]);
331         WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
332         WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
333         WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
334         WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
335         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
336         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
337         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
338         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
339         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
340         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
341         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
342         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
343         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
344         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
345         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
346         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
347         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
348         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
349         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
350         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
351         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
352         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
353         WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
354 }
355
/* Program the memory controller: place the VRAM and (optionally) AGP
 * apertures in the GPU address space. Display is stopped around the
 * reprogramming via evergreen_mc_stop()/evergreen_mc_resume(). */
static void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration: system aperture must span VRAM and GTT
	 * when AGP is in use, whichever order they sit in */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
	/* FB location: end in the high half-word, start in the low,
	 * both in 16MB (>> 24) units */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
	WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* BOT > TOP effectively disables the AGP aperture */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
423
424 /*
425  * CP.
426  */
427
/* Upload the PFP and ME (PM4) microcode images into the command
 * processor. Both firmware blobs must already have been fetched;
 * returns -EINVAL if either is missing, 0 on success. */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	/* park the ring buffer (RB_NO_UPDATE) while ucode is written */
	WREG32(CP_RB_CNTL, RB_NO_UPDATE | (15 << 8) | (3 << 0));

	/* PFP ucode: firmware words are stored big-endian */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* ME (PM4) ucode */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* reset all ucode RAM read/write pointers */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
455
/* Bring the command processor back up: soft-reset the CP and dependent
 * blocks, program the ring buffer, start the CP, and run a ring test.
 * Returns 0 on success or the ring-test error code. */
int evergreen_cp_resume(struct radeon_device *rdev)
{
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);	/* read back to post the write */
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size (log2 of the size in qwords) */
	rb_bufsz = drm_order(rdev->cp.ring_size / 8);
	tmp = RB_NO_UPDATE | (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x4);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers
	 * (RB_RPTR_WR_ENA is set temporarily to allow writing RPTR_WR) */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	WREG32(CP_RB_WPTR, 0);
	WREG32(CP_RB_RPTR_ADDR, rdev->cp.gpu_addr & 0xFFFFFFFF);
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->cp.gpu_addr));
	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	/* ring base is in 256-byte units */
	WREG32(CP_RB_BASE, rdev->cp.gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	/* mirror hardware pointers into the driver's ring state */
	rdev->cp.rptr = RREG32(CP_RB_RPTR);
	rdev->cp.wptr = RREG32(CP_RB_WPTR);

	r600_cp_start(rdev);
	rdev->cp.ready = true;
	r = radeon_ring_test(rdev);
	if (r) {
		rdev->cp.ready = false;
		return r;
	}
	return 0;
}
509
510 /*
511  * Core functions
512  */
513 static u32 evergreen_get_tile_pipe_to_backend_map(struct radeon_device *rdev,
514                                                   u32 num_tile_pipes,
515                                                   u32 num_backends,
516                                                   u32 backend_disable_mask)
517 {
518         u32 backend_map = 0;
519         u32 enabled_backends_mask = 0;
520         u32 enabled_backends_count = 0;
521         u32 cur_pipe;
522         u32 swizzle_pipe[EVERGREEN_MAX_PIPES];
523         u32 cur_backend = 0;
524         u32 i;
525         bool force_no_swizzle;
526
527         if (num_tile_pipes > EVERGREEN_MAX_PIPES)
528                 num_tile_pipes = EVERGREEN_MAX_PIPES;
529         if (num_tile_pipes < 1)
530                 num_tile_pipes = 1;
531         if (num_backends > EVERGREEN_MAX_BACKENDS)
532                 num_backends = EVERGREEN_MAX_BACKENDS;
533         if (num_backends < 1)
534                 num_backends = 1;
535
536         for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
537                 if (((backend_disable_mask >> i) & 1) == 0) {
538                         enabled_backends_mask |= (1 << i);
539                         ++enabled_backends_count;
540                 }
541                 if (enabled_backends_count == num_backends)
542                         break;
543         }
544
545         if (enabled_backends_count == 0) {
546                 enabled_backends_mask = 1;
547                 enabled_backends_count = 1;
548         }
549
550         if (enabled_backends_count != num_backends)
551                 num_backends = enabled_backends_count;
552
553         memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * EVERGREEN_MAX_PIPES);
554         switch (rdev->family) {
555         case CHIP_CEDAR:
556         case CHIP_REDWOOD:
557                 force_no_swizzle = false;
558                 break;
559         case CHIP_CYPRESS:
560         case CHIP_HEMLOCK:
561         case CHIP_JUNIPER:
562         default:
563                 force_no_swizzle = true;
564                 break;
565         }
566         if (force_no_swizzle) {
567                 bool last_backend_enabled = false;
568
569                 force_no_swizzle = false;
570                 for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
571                         if (((enabled_backends_mask >> i) & 1) == 1) {
572                                 if (last_backend_enabled)
573                                         force_no_swizzle = true;
574                                 last_backend_enabled = true;
575                         } else
576                                 last_backend_enabled = false;
577                 }
578         }
579
580         switch (num_tile_pipes) {
581         case 1:
582         case 3:
583         case 5:
584         case 7:
585                 DRM_ERROR("odd number of pipes!\n");
586                 break;
587         case 2:
588                 swizzle_pipe[0] = 0;
589                 swizzle_pipe[1] = 1;
590                 break;
591         case 4:
592                 if (force_no_swizzle) {
593                         swizzle_pipe[0] = 0;
594                         swizzle_pipe[1] = 1;
595                         swizzle_pipe[2] = 2;
596                         swizzle_pipe[3] = 3;
597                 } else {
598                         swizzle_pipe[0] = 0;
599                         swizzle_pipe[1] = 2;
600                         swizzle_pipe[2] = 1;
601                         swizzle_pipe[3] = 3;
602                 }
603                 break;
604         case 6:
605                 if (force_no_swizzle) {
606                         swizzle_pipe[0] = 0;
607                         swizzle_pipe[1] = 1;
608                         swizzle_pipe[2] = 2;
609                         swizzle_pipe[3] = 3;
610                         swizzle_pipe[4] = 4;
611                         swizzle_pipe[5] = 5;
612                 } else {
613                         swizzle_pipe[0] = 0;
614                         swizzle_pipe[1] = 2;
615                         swizzle_pipe[2] = 4;
616                         swizzle_pipe[3] = 1;
617                         swizzle_pipe[4] = 3;
618                         swizzle_pipe[5] = 5;
619                 }
620                 break;
621         case 8:
622                 if (force_no_swizzle) {
623                         swizzle_pipe[0] = 0;
624                         swizzle_pipe[1] = 1;
625                         swizzle_pipe[2] = 2;
626                         swizzle_pipe[3] = 3;
627                         swizzle_pipe[4] = 4;
628                         swizzle_pipe[5] = 5;
629                         swizzle_pipe[6] = 6;
630                         swizzle_pipe[7] = 7;
631                 } else {
632                         swizzle_pipe[0] = 0;
633                         swizzle_pipe[1] = 2;
634                         swizzle_pipe[2] = 4;
635                         swizzle_pipe[3] = 6;
636                         swizzle_pipe[4] = 1;
637                         swizzle_pipe[5] = 3;
638                         swizzle_pipe[6] = 5;
639                         swizzle_pipe[7] = 7;
640                 }
641                 break;
642         }
643
644         for (cur_pipe = 0; cur_pipe < num_tile_pipes; ++cur_pipe) {
645                 while (((1 << cur_backend) & enabled_backends_mask) == 0)
646                         cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
647
648                 backend_map |= (((cur_backend & 0xf) << (swizzle_pipe[cur_pipe] * 4)));
649
650                 cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
651         }
652
653         return backend_map;
654 }
655
656 static void evergreen_gpu_init(struct radeon_device *rdev)
657 {
658         u32 cc_rb_backend_disable = 0;
659         u32 cc_gc_shader_pipe_config;
660         u32 gb_addr_config = 0;
661         u32 mc_shared_chmap, mc_arb_ramcfg;
662         u32 gb_backend_map;
663         u32 grbm_gfx_index;
664         u32 sx_debug_1;
665         u32 smx_dc_ctl0;
666         u32 sq_config;
667         u32 sq_lds_resource_mgmt;
668         u32 sq_gpr_resource_mgmt_1;
669         u32 sq_gpr_resource_mgmt_2;
670         u32 sq_gpr_resource_mgmt_3;
671         u32 sq_thread_resource_mgmt;
672         u32 sq_thread_resource_mgmt_2;
673         u32 sq_stack_resource_mgmt_1;
674         u32 sq_stack_resource_mgmt_2;
675         u32 sq_stack_resource_mgmt_3;
676         u32 vgt_cache_invalidation;
677         u32 hdp_host_path_cntl;
678         int i, j, num_shader_engines, ps_thread_count;
679
680         switch (rdev->family) {
681         case CHIP_CYPRESS:
682         case CHIP_HEMLOCK:
683                 rdev->config.evergreen.num_ses = 2;
684                 rdev->config.evergreen.max_pipes = 4;
685                 rdev->config.evergreen.max_tile_pipes = 8;
686                 rdev->config.evergreen.max_simds = 10;
687                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
688                 rdev->config.evergreen.max_gprs = 256;
689                 rdev->config.evergreen.max_threads = 248;
690                 rdev->config.evergreen.max_gs_threads = 32;
691                 rdev->config.evergreen.max_stack_entries = 512;
692                 rdev->config.evergreen.sx_num_of_sets = 4;
693                 rdev->config.evergreen.sx_max_export_size = 256;
694                 rdev->config.evergreen.sx_max_export_pos_size = 64;
695                 rdev->config.evergreen.sx_max_export_smx_size = 192;
696                 rdev->config.evergreen.max_hw_contexts = 8;
697                 rdev->config.evergreen.sq_num_cf_insts = 2;
698
699                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
700                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
701                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
702                 break;
703         case CHIP_JUNIPER:
704                 rdev->config.evergreen.num_ses = 1;
705                 rdev->config.evergreen.max_pipes = 4;
706                 rdev->config.evergreen.max_tile_pipes = 4;
707                 rdev->config.evergreen.max_simds = 10;
708                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
709                 rdev->config.evergreen.max_gprs = 256;
710                 rdev->config.evergreen.max_threads = 248;
711                 rdev->config.evergreen.max_gs_threads = 32;
712                 rdev->config.evergreen.max_stack_entries = 512;
713                 rdev->config.evergreen.sx_num_of_sets = 4;
714                 rdev->config.evergreen.sx_max_export_size = 256;
715                 rdev->config.evergreen.sx_max_export_pos_size = 64;
716                 rdev->config.evergreen.sx_max_export_smx_size = 192;
717                 rdev->config.evergreen.max_hw_contexts = 8;
718                 rdev->config.evergreen.sq_num_cf_insts = 2;
719
720                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
721                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
722                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
723                 break;
724         case CHIP_REDWOOD:
725                 rdev->config.evergreen.num_ses = 1;
726                 rdev->config.evergreen.max_pipes = 4;
727                 rdev->config.evergreen.max_tile_pipes = 4;
728                 rdev->config.evergreen.max_simds = 5;
729                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
730                 rdev->config.evergreen.max_gprs = 256;
731                 rdev->config.evergreen.max_threads = 248;
732                 rdev->config.evergreen.max_gs_threads = 32;
733                 rdev->config.evergreen.max_stack_entries = 256;
734                 rdev->config.evergreen.sx_num_of_sets = 4;
735                 rdev->config.evergreen.sx_max_export_size = 256;
736                 rdev->config.evergreen.sx_max_export_pos_size = 64;
737                 rdev->config.evergreen.sx_max_export_smx_size = 192;
738                 rdev->config.evergreen.max_hw_contexts = 8;
739                 rdev->config.evergreen.sq_num_cf_insts = 2;
740
741                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
742                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
743                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
744                 break;
745         case CHIP_CEDAR:
746         default:
747                 rdev->config.evergreen.num_ses = 1;
748                 rdev->config.evergreen.max_pipes = 2;
749                 rdev->config.evergreen.max_tile_pipes = 2;
750                 rdev->config.evergreen.max_simds = 2;
751                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
752                 rdev->config.evergreen.max_gprs = 256;
753                 rdev->config.evergreen.max_threads = 192;
754                 rdev->config.evergreen.max_gs_threads = 16;
755                 rdev->config.evergreen.max_stack_entries = 256;
756                 rdev->config.evergreen.sx_num_of_sets = 4;
757                 rdev->config.evergreen.sx_max_export_size = 128;
758                 rdev->config.evergreen.sx_max_export_pos_size = 32;
759                 rdev->config.evergreen.sx_max_export_smx_size = 96;
760                 rdev->config.evergreen.max_hw_contexts = 4;
761                 rdev->config.evergreen.sq_num_cf_insts = 1;
762
763                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
764                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
765                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
766                 break;
767         }
768
769         /* Initialize HDP */
770         for (i = 0, j = 0; i < 32; i++, j += 0x18) {
771                 WREG32((0x2c14 + j), 0x00000000);
772                 WREG32((0x2c18 + j), 0x00000000);
773                 WREG32((0x2c1c + j), 0x00000000);
774                 WREG32((0x2c20 + j), 0x00000000);
775                 WREG32((0x2c24 + j), 0x00000000);
776         }
777
778         WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
779
780         cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & ~2;
781
782         cc_gc_shader_pipe_config |=
783                 INACTIVE_QD_PIPES((EVERGREEN_MAX_PIPES_MASK << rdev->config.evergreen.max_pipes)
784                                   & EVERGREEN_MAX_PIPES_MASK);
785         cc_gc_shader_pipe_config |=
786                 INACTIVE_SIMDS((EVERGREEN_MAX_SIMDS_MASK << rdev->config.evergreen.max_simds)
787                                & EVERGREEN_MAX_SIMDS_MASK);
788
789         cc_rb_backend_disable =
790                 BACKEND_DISABLE((EVERGREEN_MAX_BACKENDS_MASK << rdev->config.evergreen.max_backends)
791                                 & EVERGREEN_MAX_BACKENDS_MASK);
792
793
794         mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
795         mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
796
797         switch (rdev->config.evergreen.max_tile_pipes) {
798         case 1:
799         default:
800                 gb_addr_config |= NUM_PIPES(0);
801                 break;
802         case 2:
803                 gb_addr_config |= NUM_PIPES(1);
804                 break;
805         case 4:
806                 gb_addr_config |= NUM_PIPES(2);
807                 break;
808         case 8:
809                 gb_addr_config |= NUM_PIPES(3);
810                 break;
811         }
812
813         gb_addr_config |= PIPE_INTERLEAVE_SIZE((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
814         gb_addr_config |= BANK_INTERLEAVE_SIZE(0);
815         gb_addr_config |= NUM_SHADER_ENGINES(rdev->config.evergreen.num_ses - 1);
816         gb_addr_config |= SHADER_ENGINE_TILE_SIZE(1);
817         gb_addr_config |= NUM_GPUS(0); /* Hemlock? */
818         gb_addr_config |= MULTI_GPU_TILE_SIZE(2);
819
820         if (((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT) > 2)
821                 gb_addr_config |= ROW_SIZE(2);
822         else
823                 gb_addr_config |= ROW_SIZE((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT);
824
825         if (rdev->ddev->pdev->device == 0x689e) {
826                 u32 efuse_straps_4;
827                 u32 efuse_straps_3;
828                 u8 efuse_box_bit_131_124;
829
830                 WREG32(RCU_IND_INDEX, 0x204);
831                 efuse_straps_4 = RREG32(RCU_IND_DATA);
832                 WREG32(RCU_IND_INDEX, 0x203);
833                 efuse_straps_3 = RREG32(RCU_IND_DATA);
834                 efuse_box_bit_131_124 = (u8)(((efuse_straps_4 & 0xf) << 4) | ((efuse_straps_3 & 0xf0000000) >> 28));
835
836                 switch(efuse_box_bit_131_124) {
837                 case 0x00:
838                         gb_backend_map = 0x76543210;
839                         break;
840                 case 0x55:
841                         gb_backend_map = 0x77553311;
842                         break;
843                 case 0x56:
844                         gb_backend_map = 0x77553300;
845                         break;
846                 case 0x59:
847                         gb_backend_map = 0x77552211;
848                         break;
849                 case 0x66:
850                         gb_backend_map = 0x77443300;
851                         break;
852                 case 0x99:
853                         gb_backend_map = 0x66552211;
854                         break;
855                 case 0x5a:
856                         gb_backend_map = 0x77552200;
857                         break;
858                 case 0xaa:
859                         gb_backend_map = 0x66442200;
860                         break;
861                 case 0x95:
862                         gb_backend_map = 0x66553311;
863                         break;
864                 default:
865                         DRM_ERROR("bad backend map, using default\n");
866                         gb_backend_map =
867                                 evergreen_get_tile_pipe_to_backend_map(rdev,
868                                                                        rdev->config.evergreen.max_tile_pipes,
869                                                                        rdev->config.evergreen.max_backends,
870                                                                        ((EVERGREEN_MAX_BACKENDS_MASK <<
871                                                                    rdev->config.evergreen.max_backends) &
872                                                                         EVERGREEN_MAX_BACKENDS_MASK));
873                         break;
874                 }
875         } else if (rdev->ddev->pdev->device == 0x68b9) {
876                 u32 efuse_straps_3;
877                 u8 efuse_box_bit_127_124;
878
879                 WREG32(RCU_IND_INDEX, 0x203);
880                 efuse_straps_3 = RREG32(RCU_IND_DATA);
881                 efuse_box_bit_127_124 = (u8)(efuse_straps_3 & 0xF0000000) >> 28;
882
883                 switch(efuse_box_bit_127_124) {
884                 case 0x0:
885                         gb_backend_map = 0x00003210;
886                         break;
887                 case 0x5:
888                 case 0x6:
889                 case 0x9:
890                 case 0xa:
891                         gb_backend_map = 0x00003311;
892                         break;
893                 default:
894                         DRM_ERROR("bad backend map, using default\n");
895                         gb_backend_map =
896                                 evergreen_get_tile_pipe_to_backend_map(rdev,
897                                                                        rdev->config.evergreen.max_tile_pipes,
898                                                                        rdev->config.evergreen.max_backends,
899                                                                        ((EVERGREEN_MAX_BACKENDS_MASK <<
900                                                                    rdev->config.evergreen.max_backends) &
901                                                                         EVERGREEN_MAX_BACKENDS_MASK));
902                         break;
903                 }
904         } else
905                 gb_backend_map =
906                         evergreen_get_tile_pipe_to_backend_map(rdev,
907                                                                rdev->config.evergreen.max_tile_pipes,
908                                                                rdev->config.evergreen.max_backends,
909                                                                ((EVERGREEN_MAX_BACKENDS_MASK <<
910                                                                  rdev->config.evergreen.max_backends) &
911                                                                 EVERGREEN_MAX_BACKENDS_MASK));
912
913         WREG32(GB_BACKEND_MAP, gb_backend_map);
914         WREG32(GB_ADDR_CONFIG, gb_addr_config);
915         WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
916         WREG32(HDP_ADDR_CONFIG, gb_addr_config);
917
918         num_shader_engines = ((RREG32(GB_ADDR_CONFIG) & NUM_SHADER_ENGINES(3)) >> 12) + 1;
919         grbm_gfx_index = INSTANCE_BROADCAST_WRITES;
920
921         for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
922                 u32 rb = cc_rb_backend_disable | (0xf0 << 16);
923                 u32 sp = cc_gc_shader_pipe_config;
924                 u32 gfx = grbm_gfx_index | SE_INDEX(i);
925
926                 if (i == num_shader_engines) {
927                         rb |= BACKEND_DISABLE(EVERGREEN_MAX_BACKENDS_MASK);
928                         sp |= INACTIVE_SIMDS(EVERGREEN_MAX_SIMDS_MASK);
929                 }
930
931                 WREG32(GRBM_GFX_INDEX, gfx);
932                 WREG32(RLC_GFX_INDEX, gfx);
933
934                 WREG32(CC_RB_BACKEND_DISABLE, rb);
935                 WREG32(CC_SYS_RB_BACKEND_DISABLE, rb);
936                 WREG32(GC_USER_RB_BACKEND_DISABLE, rb);
937                 WREG32(CC_GC_SHADER_PIPE_CONFIG, sp);
938         }
939
940         grbm_gfx_index |= SE_BROADCAST_WRITES;
941         WREG32(GRBM_GFX_INDEX, grbm_gfx_index);
942         WREG32(RLC_GFX_INDEX, grbm_gfx_index);
943
944         WREG32(CGTS_SYS_TCC_DISABLE, 0);
945         WREG32(CGTS_TCC_DISABLE, 0);
946         WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
947         WREG32(CGTS_USER_TCC_DISABLE, 0);
948
949         /* set HW defaults for 3D engine */
950         WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
951                                      ROQ_IB2_START(0x2b)));
952
953         WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
954
955         WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
956                              SYNC_GRADIENT |
957                              SYNC_WALKER |
958                              SYNC_ALIGNER));
959
960         sx_debug_1 = RREG32(SX_DEBUG_1);
961         sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
962         WREG32(SX_DEBUG_1, sx_debug_1);
963
964
965         smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
966         smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
967         smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
968         WREG32(SMX_DC_CTL0, smx_dc_ctl0);
969
970         WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
971                                         POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
972                                         SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
973
974         WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
975                                  SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
976                                  SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
977
978         WREG32(VGT_NUM_INSTANCES, 1);
979         WREG32(SPI_CONFIG_CNTL, 0);
980         WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
981         WREG32(CP_PERFMON_CNTL, 0);
982
983         WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
984                                   FETCH_FIFO_HIWATER(0x4) |
985                                   DONE_FIFO_HIWATER(0xe0) |
986                                   ALU_UPDATE_FIFO_HIWATER(0x8)));
987
988         sq_config = RREG32(SQ_CONFIG);
989         sq_config &= ~(PS_PRIO(3) |
990                        VS_PRIO(3) |
991                        GS_PRIO(3) |
992                        ES_PRIO(3));
993         sq_config |= (VC_ENABLE |
994                       EXPORT_SRC_C |
995                       PS_PRIO(0) |
996                       VS_PRIO(1) |
997                       GS_PRIO(2) |
998                       ES_PRIO(3));
999
1000         if (rdev->family == CHIP_CEDAR)
1001                 /* no vertex cache */
1002                 sq_config &= ~VC_ENABLE;
1003
1004         sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
1005
1006         sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
1007         sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
1008         sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
1009         sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
1010         sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
1011         sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
1012         sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
1013
1014         if (rdev->family == CHIP_CEDAR)
1015                 ps_thread_count = 96;
1016         else
1017                 ps_thread_count = 128;
1018
1019         sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
1020         sq_thread_resource_mgmt |= NUM_VS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1021         sq_thread_resource_mgmt |= NUM_GS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1022         sq_thread_resource_mgmt |= NUM_ES_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1023         sq_thread_resource_mgmt_2 = NUM_HS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1024         sq_thread_resource_mgmt_2 |= NUM_LS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1025
1026         sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1027         sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1028         sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1029         sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1030         sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1031         sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1032
1033         WREG32(SQ_CONFIG, sq_config);
1034         WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
1035         WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
1036         WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
1037         WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
1038         WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
1039         WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
1040         WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
1041         WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
1042         WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
1043         WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
1044
1045         WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
1046                                           FORCE_EOV_MAX_REZ_CNT(255)));
1047
1048         if (rdev->family == CHIP_CEDAR)
1049                 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
1050         else
1051                 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
1052         vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
1053         WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
1054
1055         WREG32(VGT_GS_VERTEX_REUSE, 16);
1056         WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
1057
1058         WREG32(CB_PERF_CTR0_SEL_0, 0);
1059         WREG32(CB_PERF_CTR0_SEL_1, 0);
1060         WREG32(CB_PERF_CTR1_SEL_0, 0);
1061         WREG32(CB_PERF_CTR1_SEL_1, 0);
1062         WREG32(CB_PERF_CTR2_SEL_0, 0);
1063         WREG32(CB_PERF_CTR2_SEL_1, 0);
1064         WREG32(CB_PERF_CTR3_SEL_0, 0);
1065         WREG32(CB_PERF_CTR3_SEL_1, 0);
1066
1067         hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
1068         WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
1069
1070         WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
1071
1072         udelay(50);
1073
1074 }
1075
1076 int evergreen_mc_init(struct radeon_device *rdev)
1077 {
1078         u32 tmp;
1079         int chansize, numchan;
1080
1081         /* Get VRAM informations */
1082         rdev->mc.vram_is_ddr = true;
1083         tmp = RREG32(MC_ARB_RAMCFG);
1084         if (tmp & CHANSIZE_OVERRIDE) {
1085                 chansize = 16;
1086         } else if (tmp & CHANSIZE_MASK) {
1087                 chansize = 64;
1088         } else {
1089                 chansize = 32;
1090         }
1091         tmp = RREG32(MC_SHARED_CHMAP);
1092         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1093         case 0:
1094         default:
1095                 numchan = 1;
1096                 break;
1097         case 1:
1098                 numchan = 2;
1099                 break;
1100         case 2:
1101                 numchan = 4;
1102                 break;
1103         case 3:
1104                 numchan = 8;
1105                 break;
1106         }
1107         rdev->mc.vram_width = numchan * chansize;
1108         /* Could aper size report 0 ? */
1109         rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
1110         rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
1111         /* Setup GPU memory space */
1112         /* size in MB on evergreen */
1113         rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
1114         rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
1115         rdev->mc.visible_vram_size = rdev->mc.aper_size;
1116         /* FIXME remove this once we support unmappable VRAM */
1117         if (rdev->mc.mc_vram_size > rdev->mc.aper_size) {
1118                 rdev->mc.mc_vram_size = rdev->mc.aper_size;
1119                 rdev->mc.real_vram_size = rdev->mc.aper_size;
1120         }
1121         r600_vram_gtt_location(rdev, &rdev->mc);
1122         radeon_update_bandwidth_info(rdev);
1123
1124         return 0;
1125 }
1126
1127 bool evergreen_gpu_is_lockup(struct radeon_device *rdev)
1128 {
1129         /* FIXME: implement for evergreen */
1130         return false;
1131 }
1132
/*
 * evergreen_gpu_soft_reset() - soft reset the GFX and system blocks
 *
 * Logs the GRBM/SRBM status registers before and after the reset for
 * debugging, halts the CP, pulses the GRBM soft-reset lines for all
 * graphics blocks, then pulses the SRBM soft-reset lines, and finally
 * re-runs the atombios asic init tables since the GPU often comes out
 * of reset in an incoherent state.  Always returns 0.
 */
static int evergreen_gpu_soft_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 srbm_reset = 0;
	u32 grbm_reset = 0;

	/* dump pre-reset engine status for diagnostics */
	dev_info(rdev->dev, "GPU softreset \n");
	dev_info(rdev->dev, "  GRBM_STATUS=0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0=0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
		RREG32(SRBM_STATUS));
	/* quiesce the memory controller before touching reset bits */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	/* reset all the gfx blocks */
	grbm_reset = (SOFT_RESET_CP |
		      SOFT_RESET_CB |
		      SOFT_RESET_DB |
		      SOFT_RESET_PA |
		      SOFT_RESET_SC |
		      SOFT_RESET_SPI |
		      SOFT_RESET_SH |
		      SOFT_RESET_SX |
		      SOFT_RESET_TC |
		      SOFT_RESET_TA |
		      SOFT_RESET_VC |
		      SOFT_RESET_VGT);

	dev_info(rdev->dev, "  GRBM_SOFT_RESET=0x%08X\n", grbm_reset);
	WREG32(GRBM_SOFT_RESET, grbm_reset);
	/* read back to post the write before the delay */
	(void)RREG32(GRBM_SOFT_RESET);
	udelay(50);
	WREG32(GRBM_SOFT_RESET, 0);
	(void)RREG32(GRBM_SOFT_RESET);

	/* reset all the system blocks */
	srbm_reset = SRBM_SOFT_RESET_ALL_MASK;

	dev_info(rdev->dev, "  SRBM_SOFT_RESET=0x%08X\n", srbm_reset);
	WREG32(SRBM_SOFT_RESET, srbm_reset);
	/* read back to post the write before the delay */
	(void)RREG32(SRBM_SOFT_RESET);
	udelay(50);
	WREG32(SRBM_SOFT_RESET, 0);
	(void)RREG32(SRBM_SOFT_RESET);
	/* Wait a little for things to settle down */
	udelay(50);
	/* dump post-reset engine status */
	dev_info(rdev->dev, "  GRBM_STATUS=0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0=0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
		RREG32(SRBM_STATUS));
	/* After reset we need to reinit the asic as GPU often endup in an
	 * incoherent state.
	 */
	atom_asic_init(rdev->mode_info.atom_context);
	evergreen_mc_resume(rdev, &save);
	return 0;
}
1202
/*
 * evergreen_asic_reset() - asic reset callback
 *
 * Delegates to the GPU soft reset path; returns its result.
 */
int evergreen_asic_reset(struct radeon_device *rdev)
{
	int ret;

	ret = evergreen_gpu_soft_reset(rdev);
	return ret;
}
1207
/*
 * evergreen_startup() - bring up the GPU engines
 *
 * Loads microcode if not already present, programs the memory
 * controller, enables AGP or the PCIE GART depending on the bus type,
 * initializes the 3D engine defaults, then starts the CP ring and
 * enables writeback.  The blit shader and interrupt setup are compiled
 * out (#if 0) until interrupts are supported on evergreen.
 * Returns 0 on success or a negative error code.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	int r;

	/* XXX until interrupts are supported */
	if (!rdev->me_fw || !rdev->pfp_fw /*|| !rdev->rlc_fw*/) {
		r = r600_init_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load firmware!\n");
			return r;
		}
	}

	/* program the MC before enabling any bus translation */
	evergreen_mc_program(rdev);
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);
#if 0
	if (!rdev->r600_blit.shader_obj) {
		r = r600_blit_init(rdev);
		if (r) {
			DRM_ERROR("radeon: failed blitter (%d).\n", r);
			return r;
		}
	}

	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
			&rdev->r600_blit.shader_gpu_addr);
	radeon_bo_unreserve(rdev->r600_blit.shader_obj);
	if (r) {
		DRM_ERROR("failed to pin blit object %d\n", r);
		return r;
	}

	/* Enable IRQ */
	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	r600_irq_set(rdev);
#endif

	/* start the command processor: ring, microcode, then resume */
	r = radeon_ring_init(rdev, rdev->cp.ring_size);
	if (r)
		return r;
	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	/* write back buffer are not vital so don't worry about failure */
	r600_wb_enable(rdev);

	return 0;
}
1274
1275 int evergreen_resume(struct radeon_device *rdev)
1276 {
1277         int r;
1278
1279         /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
1280          * posting will perform necessary task to bring back GPU into good
1281          * shape.
1282          */
1283         /* post card */
1284         atom_asic_init(rdev->mode_info.atom_context);
1285         /* Initialize clocks */
1286         r = radeon_clocks_init(rdev);
1287         if (r) {
1288                 return r;
1289         }
1290
1291         r = evergreen_startup(rdev);
1292         if (r) {
1293                 DRM_ERROR("r600 startup failed on resume\n");
1294                 return r;
1295         }
1296
1297         r = r600_ib_test(rdev);
1298         if (r) {
1299                 DRM_ERROR("radeon: failled testing IB (%d).\n", r);
1300                 return r;
1301         }
1302
1303         return r;
1304
1305 }
1306
/*
 * evergreen_suspend() - shut down the GPU engines for suspend
 *
 * Stops the CP, disables writeback, and tears down the PCIE GART
 * mapping.  The blit shader unpin is compiled out (#if 0) until
 * acceleration support is wired up.  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
#if 0
	int r;
#endif
	/* FIXME: we should wait for ring to be empty */
	r700_cp_stop(rdev);
	rdev->cp.ready = false;
	r600_wb_disable(rdev);

	evergreen_pcie_gart_disable(rdev);
#if 0
	/* unpin shaders bo */
	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
	if (likely(r == 0)) {
		radeon_bo_unpin(rdev->r600_blit.shader_obj);
		radeon_bo_unreserve(rdev->r600_blit.shader_obj);
	}
#endif
	return 0;
}
1328
1329 static bool evergreen_card_posted(struct radeon_device *rdev)
1330 {
1331         u32 reg;
1332
1333         /* first check CRTCs */
1334         reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
1335                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
1336                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
1337                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
1338                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
1339                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
1340         if (reg & EVERGREEN_CRTC_MASTER_EN)
1341                 return true;
1342
1343         /* then check MEM_SIZE, in case the crtcs are off */
1344         if (RREG32(CONFIG_MEMSIZE))
1345                 return true;
1346
1347         return false;
1348 }
1349
/* Plan is to move initialization into this function and use
 * helper functions so that radeon_device_init does pretty much
 * nothing more than calling asic-specific functions. This
 * should also allow removal of a bunch of callback functions
 * like vram_info.
 */
1356 int evergreen_init(struct radeon_device *rdev)
1357 {
1358         int r;
1359
1360         r = radeon_dummy_page_init(rdev);
1361         if (r)
1362                 return r;
1363         /* This don't do much */
1364         r = radeon_gem_init(rdev);
1365         if (r)
1366                 return r;
1367         /* Read BIOS */
1368         if (!radeon_get_bios(rdev)) {
1369                 if (ASIC_IS_AVIVO(rdev))
1370                         return -EINVAL;
1371         }
1372         /* Must be an ATOMBIOS */
1373         if (!rdev->is_atom_bios) {
1374                 dev_err(rdev->dev, "Expecting atombios for R600 GPU\n");
1375                 return -EINVAL;
1376         }
1377         r = radeon_atombios_init(rdev);
1378         if (r)
1379                 return r;
1380         /* Post card if necessary */
1381         if (!evergreen_card_posted(rdev)) {
1382                 if (!rdev->bios) {
1383                         dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
1384                         return -EINVAL;
1385                 }
1386                 DRM_INFO("GPU not posted. posting now...\n");
1387                 atom_asic_init(rdev->mode_info.atom_context);
1388         }
1389         /* Initialize scratch registers */
1390         r600_scratch_init(rdev);
1391         /* Initialize surface registers */
1392         radeon_surface_init(rdev);
1393         /* Initialize clocks */
1394         radeon_get_clock_info(rdev->ddev);
1395         r = radeon_clocks_init(rdev);
1396         if (r)
1397                 return r;
1398         /* Initialize power management */
1399         radeon_pm_init(rdev);
1400         /* Fence driver */
1401         r = radeon_fence_driver_init(rdev);
1402         if (r)
1403                 return r;
1404         /* initialize AGP */
1405         if (rdev->flags & RADEON_IS_AGP) {
1406                 r = radeon_agp_init(rdev);
1407                 if (r)
1408                         radeon_agp_disable(rdev);
1409         }
1410         /* initialize memory controller */
1411         r = evergreen_mc_init(rdev);
1412         if (r)
1413                 return r;
1414         /* Memory manager */
1415         r = radeon_bo_init(rdev);
1416         if (r)
1417                 return r;
1418 #if 0
1419         r = radeon_irq_kms_init(rdev);
1420         if (r)
1421                 return r;
1422 #endif
1423         rdev->cp.ring_obj = NULL;
1424         r600_ring_init(rdev, 1024 * 1024);
1425 #if 0
1426         rdev->ih.ring_obj = NULL;
1427         r600_ih_ring_init(rdev, 64 * 1024);
1428 #endif
1429         r = r600_pcie_gart_init(rdev);
1430         if (r)
1431                 return r;
1432
1433         rdev->accel_working = false;
1434         r = evergreen_startup(rdev);
1435         if (r) {
1436                 dev_err(rdev->dev, "disabling GPU acceleration\n");
1437                 r700_cp_fini(rdev);
1438                 r600_wb_fini(rdev);
1439 #if 0
1440                 r600_irq_fini(rdev);
1441                 radeon_irq_kms_fini(rdev);
1442 #endif
1443                 evergreen_pcie_gart_fini(rdev);
1444                 rdev->accel_working = false;
1445         }
1446         if (rdev->accel_working) {
1447                 r = radeon_ib_pool_init(rdev);
1448                 if (r) {
1449                         DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
1450                         rdev->accel_working = false;
1451                 }
1452                 r = r600_ib_test(rdev);
1453                 if (r) {
1454                         DRM_ERROR("radeon: failed testing IB (%d).\n", r);
1455                         rdev->accel_working = false;
1456                 }
1457         }
1458         return 0;
1459 }
1460
/**
 * evergreen_fini - tear down driver state for an Evergreen GPU
 * @rdev: radeon device structure
 *
 * Releases, in reverse order of initialization, the resources acquired
 * during device init: power management, GART, GEM, fences, clocks, AGP,
 * buffer objects, ATOM BIOS context, the BIOS image copy, and the dummy
 * page.  The call order mirrors the init path and should not be changed
 * without auditing the corresponding init sequence.
 */
void evergreen_fini(struct radeon_device *rdev)
{
        radeon_pm_fini(rdev);
        /* quiesce the hardware (stops CP/GART) before freeing resources */
        evergreen_suspend(rdev);
#if 0
        /* Disabled: matches init paths (blit, IRQ, ring, writeback) that
         * are likewise compiled out (#if 0) until Evergreen acceleration
         * support is wired up. */
        r600_blit_fini(rdev);
        r600_irq_fini(rdev);
        radeon_irq_kms_fini(rdev);
        radeon_ring_fini(rdev);
        r600_wb_fini(rdev);
#endif
        evergreen_pcie_gart_fini(rdev);
        radeon_gem_fini(rdev);
        radeon_fence_driver_fini(rdev);
        radeon_clocks_fini(rdev);
        /* no-op on non-AGP boards; safe to call unconditionally */
        radeon_agp_fini(rdev);
        radeon_bo_fini(rdev);
        radeon_atombios_fini(rdev);
        /* free the BIOS image copied at init time and clear the pointer
         * to guard against use-after-free */
        kfree(rdev->bios);
        rdev->bios = NULL;
        radeon_dummy_page_fini(rdev);
}