drm/radeon/kms/evergreen: add hpd support
drivers/gpu/drm/radeon/evergreen.c
/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <linux/platform_device.h>
#include "drmP.h"
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_drm.h"
#include "evergreend.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"

#define EVERGREEN_PFP_UCODE_SIZE 1120
#define EVERGREEN_PM4_UCODE_SIZE 1376

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);

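/* Poll the hot plug detect status register for the given HPD pin and
 * report whether a display is currently connected.
 */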
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
        bool connected = false;

        switch (hpd) {
        case RADEON_HPD_1:
                if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
                        connected = true;
                break;
        case RADEON_HPD_2:
                if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
                        connected = true;
                break;
        case RADEON_HPD_3:
                if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
                        connected = true;
                break;
        case RADEON_HPD_4:
                if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
                        connected = true;
                break;
        case RADEON_HPD_5:
                if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
                        connected = true;
                break;
        case RADEON_HPD_6:
                if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
                        connected = true;
                break;
        default:
                break;
        }

        return connected;
}

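/* Set the HPD interrupt polarity based on the current sense state so
 * that the next interrupt fires on the opposite transition
 * (disconnect when connected, connect when disconnected).
 */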
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
                                enum radeon_hpd_id hpd)
{
        u32 tmp;
        bool connected = evergreen_hpd_sense(rdev, hpd);

        switch (hpd) {
        case RADEON_HPD_1:
                tmp = RREG32(DC_HPD1_INT_CONTROL);
                if (connected)
                        tmp &= ~DC_HPDx_INT_POLARITY;
                else
                        tmp |= DC_HPDx_INT_POLARITY;
                WREG32(DC_HPD1_INT_CONTROL, tmp);
                break;
        case RADEON_HPD_2:
                tmp = RREG32(DC_HPD2_INT_CONTROL);
                if (connected)
                        tmp &= ~DC_HPDx_INT_POLARITY;
                else
                        tmp |= DC_HPDx_INT_POLARITY;
                WREG32(DC_HPD2_INT_CONTROL, tmp);
                break;
        case RADEON_HPD_3:
                tmp = RREG32(DC_HPD3_INT_CONTROL);
                if (connected)
                        tmp &= ~DC_HPDx_INT_POLARITY;
                else
                        tmp |= DC_HPDx_INT_POLARITY;
                WREG32(DC_HPD3_INT_CONTROL, tmp);
                break;
        case RADEON_HPD_4:
                tmp = RREG32(DC_HPD4_INT_CONTROL);
                if (connected)
                        tmp &= ~DC_HPDx_INT_POLARITY;
                else
                        tmp |= DC_HPDx_INT_POLARITY;
                WREG32(DC_HPD4_INT_CONTROL, tmp);
                break;
        case RADEON_HPD_5:
                tmp = RREG32(DC_HPD5_INT_CONTROL);
                if (connected)
                        tmp &= ~DC_HPDx_INT_POLARITY;
                else
                        tmp |= DC_HPDx_INT_POLARITY;
                WREG32(DC_HPD5_INT_CONTROL, tmp);
                break;
        case RADEON_HPD_6:
                tmp = RREG32(DC_HPD6_INT_CONTROL);
                if (connected)
                        tmp &= ~DC_HPDx_INT_POLARITY;
                else
                        tmp |= DC_HPDx_INT_POLARITY;
                WREG32(DC_HPD6_INT_CONTROL, tmp);
                break;
        default:
                break;
        }
}

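/* Enable the HPD pins used by the connectors on this board, program
 * their connection and RX interrupt timers, and flag them so the
 * interrupt handler will generate hotplug events for them.
 */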
void evergreen_hpd_init(struct radeon_device *rdev)
{
        struct drm_device *dev = rdev->ddev;
        struct drm_connector *connector;
        u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
                DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;

        list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
                struct radeon_connector *radeon_connector = to_radeon_connector(connector);
                switch (radeon_connector->hpd.hpd) {
                case RADEON_HPD_1:
                        WREG32(DC_HPD1_CONTROL, tmp);
                        rdev->irq.hpd[0] = true;
                        break;
                case RADEON_HPD_2:
                        WREG32(DC_HPD2_CONTROL, tmp);
                        rdev->irq.hpd[1] = true;
                        break;
                case RADEON_HPD_3:
                        WREG32(DC_HPD3_CONTROL, tmp);
                        rdev->irq.hpd[2] = true;
                        break;
                case RADEON_HPD_4:
                        WREG32(DC_HPD4_CONTROL, tmp);
                        rdev->irq.hpd[3] = true;
                        break;
                case RADEON_HPD_5:
                        WREG32(DC_HPD5_CONTROL, tmp);
                        rdev->irq.hpd[4] = true;
                        break;
                case RADEON_HPD_6:
                        WREG32(DC_HPD6_CONTROL, tmp);
                        rdev->irq.hpd[5] = true;
                        break;
                default:
                        break;
                }
        }
        if (rdev->irq.installed)
                evergreen_irq_set(rdev);
}

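/* Disable the HPD pins and clear their interrupt enable flags. */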
void evergreen_hpd_fini(struct radeon_device *rdev)
{
        struct drm_device *dev = rdev->ddev;
        struct drm_connector *connector;

        list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
                struct radeon_connector *radeon_connector = to_radeon_connector(connector);
                switch (radeon_connector->hpd.hpd) {
                case RADEON_HPD_1:
                        WREG32(DC_HPD1_CONTROL, 0);
                        rdev->irq.hpd[0] = false;
                        break;
                case RADEON_HPD_2:
                        WREG32(DC_HPD2_CONTROL, 0);
                        rdev->irq.hpd[1] = false;
                        break;
                case RADEON_HPD_3:
                        WREG32(DC_HPD3_CONTROL, 0);
                        rdev->irq.hpd[2] = false;
                        break;
                case RADEON_HPD_4:
                        WREG32(DC_HPD4_CONTROL, 0);
                        rdev->irq.hpd[3] = false;
                        break;
                case RADEON_HPD_5:
                        WREG32(DC_HPD5_CONTROL, 0);
                        rdev->irq.hpd[4] = false;
                        break;
                case RADEON_HPD_6:
                        WREG32(DC_HPD6_CONTROL, 0);
                        rdev->irq.hpd[5] = false;
                        break;
                default:
                        break;
                }
        }
}

void evergreen_bandwidth_update(struct radeon_device *rdev)
{
        /* XXX */
}

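/* Poll the SRBM status until the memory controller blocks report
 * idle; returns 0 on success, -1 if the timeout expires.
 */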
static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
        unsigned i;
        u32 tmp;

        for (i = 0; i < rdev->usec_timeout; i++) {
                /* read the MC busy bits of SRBM_STATUS */
                tmp = RREG32(SRBM_STATUS) & 0x1F00;
                if (!tmp)
                        return 0;
                udelay(1);
        }
        return -1;
}

/*
 * GART
 */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
        unsigned i;
        u32 tmp;

        WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
        for (i = 0; i < rdev->usec_timeout; i++) {
                /* read the flush response; 2 indicates failure */
                tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
                tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
                if (tmp == 2) {
                        printk(KERN_WARNING "[drm] evergreen flush TLB failed\n");
                        return;
                }
                if (tmp)
                        return;
                udelay(1);
        }
}

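/* Pin the GART page table in VRAM and program the L2 cache, the L1
 * TLBs and the VM context 0 registers that map the GTT aperture.
 */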
int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
        u32 tmp;
        int r;

        if (rdev->gart.table.vram.robj == NULL) {
                dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
                return -EINVAL;
        }
        r = radeon_gart_table_vram_pin(rdev);
        if (r)
                return r;
        radeon_gart_restore(rdev);
        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
                                ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
                                EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
                SYSTEM_ACCESS_MODE_NOT_IN_SYS |
                SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
                EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
        WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
        WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
        WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
                                RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
        WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
                        (u32)(rdev->dummy_page.addr >> 12));
        WREG32(VM_CONTEXT1_CNTL, 0);

        evergreen_pcie_gart_tlb_flush(rdev);
        rdev->gart.ready = true;
        return 0;
}

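/* Disable both VM contexts and the L1 TLBs, then unpin the GART
 * page table object.
 */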
void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
        u32 tmp;
        int r;

        /* Disable all tables */
        WREG32(VM_CONTEXT0_CNTL, 0);
        WREG32(VM_CONTEXT1_CNTL, 0);

        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
                                EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        if (rdev->gart.table.vram.robj) {
                r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
                if (likely(r == 0)) {
                        radeon_bo_kunmap(rdev->gart.table.vram.robj);
                        radeon_bo_unpin(rdev->gart.table.vram.robj);
                        radeon_bo_unreserve(rdev->gart.table.vram.robj);
                }
        }
}

void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
        evergreen_pcie_gart_disable(rdev);
        radeon_gart_table_vram_free(rdev);
        radeon_gart_fini(rdev);
}

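/* Set up the MC for AGP operation: enable the L2 cache and L1 TLBs
 * but leave both VM contexts disabled so no page table is used.
 */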
void evergreen_agp_enable(struct radeon_device *rdev)
{
        u32 tmp;

        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
                                ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
                                EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
                SYSTEM_ACCESS_MODE_NOT_IN_SYS |
                SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
                EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        WREG32(VM_CONTEXT0_CNTL, 0);
        WREG32(VM_CONTEXT1_CNTL, 0);
}

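/* Save the VGA and CRTC state, then blank every display pipe so the
 * memory controller can be reprogrammed without live scanout.
 */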
static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
        save->vga_control[0] = RREG32(D1VGA_CONTROL);
        save->vga_control[1] = RREG32(D2VGA_CONTROL);
        save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
        save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
        save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
        save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
        save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
        save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
        save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
        save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
        save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
        save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
        save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
        save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);

        /* Stop all video */
        WREG32(VGA_RENDER_CONTROL, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

        WREG32(D1VGA_CONTROL, 0);
        WREG32(D2VGA_CONTROL, 0);
        WREG32(EVERGREEN_D3VGA_CONTROL, 0);
        WREG32(EVERGREEN_D4VGA_CONTROL, 0);
        WREG32(EVERGREEN_D5VGA_CONTROL, 0);
        WREG32(EVERGREEN_D6VGA_CONTROL, 0);
}

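/* Retarget every CRTC and the VGA aperture at the (possibly moved)
 * VRAM base, then restore the display state saved by
 * evergreen_mc_stop().
 */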
static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
        /* Unlock host access */
        WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
        mdelay(1);
        /* Restore video state */
        WREG32(D1VGA_CONTROL, save->vga_control[0]);
        WREG32(D2VGA_CONTROL, save->vga_control[1]);
        WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
        WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
        WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
        WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
}

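/* Program the MC system aperture and framebuffer location while the
 * displays are stopped, waiting for the MC to idle around the update.
 */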
static void evergreen_mc_program(struct radeon_device *rdev)
{
        struct evergreen_mc_save save;
        u32 tmp;
        int i, j;

        /* Initialize HDP */
        for (i = 0, j = 0; i < 32; i++, j += 0x18) {
                WREG32((0x2c14 + j), 0x00000000);
                WREG32((0x2c18 + j), 0x00000000);
                WREG32((0x2c1c + j), 0x00000000);
                WREG32((0x2c20 + j), 0x00000000);
                WREG32((0x2c24 + j), 0x00000000);
        }
        WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

        evergreen_mc_stop(rdev, &save);
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
        }
        /* Lockout access through VGA aperture */
        WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
        /* Update configuration */
        if (rdev->flags & RADEON_IS_AGP) {
                if (rdev->mc.vram_start < rdev->mc.gtt_start) {
                        /* VRAM before AGP */
                        WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                                rdev->mc.vram_start >> 12);
                        WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                                rdev->mc.gtt_end >> 12);
                } else {
                        /* VRAM after AGP */
                        WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                                rdev->mc.gtt_start >> 12);
                        WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                                rdev->mc.vram_end >> 12);
                }
        } else {
                WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                        rdev->mc.vram_start >> 12);
                WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                        rdev->mc.vram_end >> 12);
        }
        WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
        tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
        tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
        WREG32(MC_VM_FB_LOCATION, tmp);
        WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
        WREG32(HDP_NONSURFACE_INFO, (2 << 7));
        WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
        if (rdev->flags & RADEON_IS_AGP) {
                WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
                WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
                WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
        } else {
                WREG32(MC_VM_AGP_BASE, 0);
                WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
                WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
        }
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
        }
        evergreen_mc_resume(rdev, &save);
        /* we need to own VRAM, so turn off the VGA renderer here
         * to stop it overwriting our objects */
        rv515_vga_render_disable(rdev);
}

/*
 * CP.
 */

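/* Stop the CP and load the PFP and ME microcode images into the
 * command processor ucode RAMs.
 */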
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
        const __be32 *fw_data;
        int i;

        if (!rdev->me_fw || !rdev->pfp_fw)
                return -EINVAL;

        r700_cp_stop(rdev);
        WREG32(CP_RB_CNTL, RB_NO_UPDATE | (15 << 8) | (3 << 0));

        fw_data = (const __be32 *)rdev->pfp_fw->data;
        WREG32(CP_PFP_UCODE_ADDR, 0);
        for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
                WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
        WREG32(CP_PFP_UCODE_ADDR, 0);

        fw_data = (const __be32 *)rdev->me_fw->data;
        WREG32(CP_ME_RAM_WADDR, 0);
        for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
                WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

        WREG32(CP_PFP_UCODE_ADDR, 0);
        WREG32(CP_ME_RAM_WADDR, 0);
        WREG32(CP_ME_RAM_RADDR, 0);
        return 0;
}

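/* Soft reset the CP and its sibling blocks, program the ring buffer
 * registers, then restart the ring and verify it with a ring test.
 */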
int evergreen_cp_resume(struct radeon_device *rdev)
{
        u32 tmp;
        u32 rb_bufsz;
        int r;

        /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
        WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
                                 SOFT_RESET_PA |
                                 SOFT_RESET_SH |
                                 SOFT_RESET_VGT |
                                 SOFT_RESET_SX));
        RREG32(GRBM_SOFT_RESET);
        mdelay(15);
        WREG32(GRBM_SOFT_RESET, 0);
        RREG32(GRBM_SOFT_RESET);

        /* Set ring buffer size */
        rb_bufsz = drm_order(rdev->cp.ring_size / 8);
        tmp = RB_NO_UPDATE | (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
        tmp |= BUF_SWAP_32BIT;
#endif
        WREG32(CP_RB_CNTL, tmp);
        WREG32(CP_SEM_WAIT_TIMER, 0x4);

        /* Set the write pointer delay */
        WREG32(CP_RB_WPTR_DELAY, 0);

        /* Initialize the ring buffer's read and write pointers */
        WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
        WREG32(CP_RB_RPTR_WR, 0);
        WREG32(CP_RB_WPTR, 0);
        WREG32(CP_RB_RPTR_ADDR, rdev->cp.gpu_addr & 0xFFFFFFFF);
        WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->cp.gpu_addr));
        mdelay(1);
        WREG32(CP_RB_CNTL, tmp);

        WREG32(CP_RB_BASE, rdev->cp.gpu_addr >> 8);
        WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

        rdev->cp.rptr = RREG32(CP_RB_RPTR);
        rdev->cp.wptr = RREG32(CP_RB_WPTR);

        r600_cp_start(rdev);
        rdev->cp.ready = true;
        r = radeon_ring_test(rdev);
        if (r) {
                rdev->cp.ready = false;
                return r;
        }
        return 0;
}

/*
 * Core functions
 */
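/* Build the pipe-to-backend swizzle map: distribute the enabled
 * render backends across the tile pipes, honoring the backend
 * disable mask and the per-family swizzle policy.
 */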
static u32 evergreen_get_tile_pipe_to_backend_map(struct radeon_device *rdev,
                                                  u32 num_tile_pipes,
                                                  u32 num_backends,
                                                  u32 backend_disable_mask)
{
        u32 backend_map = 0;
        u32 enabled_backends_mask = 0;
        u32 enabled_backends_count = 0;
        u32 cur_pipe;
        u32 swizzle_pipe[EVERGREEN_MAX_PIPES];
        u32 cur_backend = 0;
        u32 i;
        bool force_no_swizzle;

        if (num_tile_pipes > EVERGREEN_MAX_PIPES)
                num_tile_pipes = EVERGREEN_MAX_PIPES;
        if (num_tile_pipes < 1)
                num_tile_pipes = 1;
        if (num_backends > EVERGREEN_MAX_BACKENDS)
                num_backends = EVERGREEN_MAX_BACKENDS;
        if (num_backends < 1)
                num_backends = 1;

        for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
                if (((backend_disable_mask >> i) & 1) == 0) {
                        enabled_backends_mask |= (1 << i);
                        ++enabled_backends_count;
                }
                if (enabled_backends_count == num_backends)
                        break;
        }

        if (enabled_backends_count == 0) {
                enabled_backends_mask = 1;
                enabled_backends_count = 1;
        }

        if (enabled_backends_count != num_backends)
                num_backends = enabled_backends_count;

        memset(swizzle_pipe, 0, sizeof(swizzle_pipe));
        switch (rdev->family) {
        case CHIP_CEDAR:
        case CHIP_REDWOOD:
                force_no_swizzle = false;
                break;
        case CHIP_CYPRESS:
        case CHIP_HEMLOCK:
        case CHIP_JUNIPER:
        default:
                force_no_swizzle = true;
                break;
        }
        if (force_no_swizzle) {
                bool last_backend_enabled = false;

                force_no_swizzle = false;
                for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
                        if (((enabled_backends_mask >> i) & 1) == 1) {
                                if (last_backend_enabled)
                                        force_no_swizzle = true;
                                last_backend_enabled = true;
                        } else
                                last_backend_enabled = false;
                }
        }

        switch (num_tile_pipes) {
        case 1:
        case 3:
        case 5:
        case 7:
                DRM_ERROR("odd number of pipes!\n");
                break;
        case 2:
                swizzle_pipe[0] = 0;
                swizzle_pipe[1] = 1;
                break;
        case 4:
                if (force_no_swizzle) {
                        swizzle_pipe[0] = 0;
                        swizzle_pipe[1] = 1;
                        swizzle_pipe[2] = 2;
                        swizzle_pipe[3] = 3;
                } else {
                        swizzle_pipe[0] = 0;
                        swizzle_pipe[1] = 2;
                        swizzle_pipe[2] = 1;
                        swizzle_pipe[3] = 3;
                }
                break;
        case 6:
                if (force_no_swizzle) {
                        swizzle_pipe[0] = 0;
                        swizzle_pipe[1] = 1;
                        swizzle_pipe[2] = 2;
                        swizzle_pipe[3] = 3;
                        swizzle_pipe[4] = 4;
                        swizzle_pipe[5] = 5;
                } else {
                        swizzle_pipe[0] = 0;
                        swizzle_pipe[1] = 2;
                        swizzle_pipe[2] = 4;
                        swizzle_pipe[3] = 1;
                        swizzle_pipe[4] = 3;
                        swizzle_pipe[5] = 5;
                }
                break;
        case 8:
                if (force_no_swizzle) {
                        swizzle_pipe[0] = 0;
                        swizzle_pipe[1] = 1;
                        swizzle_pipe[2] = 2;
                        swizzle_pipe[3] = 3;
                        swizzle_pipe[4] = 4;
                        swizzle_pipe[5] = 5;
                        swizzle_pipe[6] = 6;
                        swizzle_pipe[7] = 7;
                } else {
                        swizzle_pipe[0] = 0;
                        swizzle_pipe[1] = 2;
                        swizzle_pipe[2] = 4;
                        swizzle_pipe[3] = 6;
                        swizzle_pipe[4] = 1;
                        swizzle_pipe[5] = 3;
                        swizzle_pipe[6] = 5;
                        swizzle_pipe[7] = 7;
                }
                break;
        }

        for (cur_pipe = 0; cur_pipe < num_tile_pipes; ++cur_pipe) {
                while (((1 << cur_backend) & enabled_backends_mask) == 0)
                        cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;

                backend_map |= (((cur_backend & 0xf) << (swizzle_pipe[cur_pipe] * 4)));

                cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
        }

        return backend_map;
}

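/* Program the per-family gfx configuration: tiling and backend setup,
 * the SQ/SX/SC resource split, and the remaining 3D engine defaults.
 */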
static void evergreen_gpu_init(struct radeon_device *rdev)
{
        u32 cc_rb_backend_disable = 0;
        u32 cc_gc_shader_pipe_config;
        u32 gb_addr_config = 0;
        u32 mc_shared_chmap, mc_arb_ramcfg;
        u32 gb_backend_map;
        u32 grbm_gfx_index;
        u32 sx_debug_1;
        u32 smx_dc_ctl0;
        u32 sq_config;
        u32 sq_lds_resource_mgmt;
        u32 sq_gpr_resource_mgmt_1;
        u32 sq_gpr_resource_mgmt_2;
        u32 sq_gpr_resource_mgmt_3;
        u32 sq_thread_resource_mgmt;
        u32 sq_thread_resource_mgmt_2;
        u32 sq_stack_resource_mgmt_1;
        u32 sq_stack_resource_mgmt_2;
        u32 sq_stack_resource_mgmt_3;
        u32 vgt_cache_invalidation;
        u32 hdp_host_path_cntl;
        int i, j, num_shader_engines, ps_thread_count;

        switch (rdev->family) {
        case CHIP_CYPRESS:
        case CHIP_HEMLOCK:
                rdev->config.evergreen.num_ses = 2;
                rdev->config.evergreen.max_pipes = 4;
                rdev->config.evergreen.max_tile_pipes = 8;
                rdev->config.evergreen.max_simds = 10;
                rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
                rdev->config.evergreen.max_gprs = 256;
                rdev->config.evergreen.max_threads = 248;
                rdev->config.evergreen.max_gs_threads = 32;
                rdev->config.evergreen.max_stack_entries = 512;
                rdev->config.evergreen.sx_num_of_sets = 4;
                rdev->config.evergreen.sx_max_export_size = 256;
                rdev->config.evergreen.sx_max_export_pos_size = 64;
                rdev->config.evergreen.sx_max_export_smx_size = 192;
                rdev->config.evergreen.max_hw_contexts = 8;
                rdev->config.evergreen.sq_num_cf_insts = 2;

                rdev->config.evergreen.sc_prim_fifo_size = 0x100;
                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
                break;
        case CHIP_JUNIPER:
                rdev->config.evergreen.num_ses = 1;
                rdev->config.evergreen.max_pipes = 4;
                rdev->config.evergreen.max_tile_pipes = 4;
                rdev->config.evergreen.max_simds = 10;
                rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
                rdev->config.evergreen.max_gprs = 256;
                rdev->config.evergreen.max_threads = 248;
                rdev->config.evergreen.max_gs_threads = 32;
                rdev->config.evergreen.max_stack_entries = 512;
                rdev->config.evergreen.sx_num_of_sets = 4;
                rdev->config.evergreen.sx_max_export_size = 256;
                rdev->config.evergreen.sx_max_export_pos_size = 64;
                rdev->config.evergreen.sx_max_export_smx_size = 192;
                rdev->config.evergreen.max_hw_contexts = 8;
                rdev->config.evergreen.sq_num_cf_insts = 2;

                rdev->config.evergreen.sc_prim_fifo_size = 0x100;
                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
                break;
        case CHIP_REDWOOD:
                rdev->config.evergreen.num_ses = 1;
                rdev->config.evergreen.max_pipes = 4;
                rdev->config.evergreen.max_tile_pipes = 4;
                rdev->config.evergreen.max_simds = 5;
                rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
                rdev->config.evergreen.max_gprs = 256;
                rdev->config.evergreen.max_threads = 248;
                rdev->config.evergreen.max_gs_threads = 32;
                rdev->config.evergreen.max_stack_entries = 256;
                rdev->config.evergreen.sx_num_of_sets = 4;
                rdev->config.evergreen.sx_max_export_size = 256;
                rdev->config.evergreen.sx_max_export_pos_size = 64;
                rdev->config.evergreen.sx_max_export_smx_size = 192;
                rdev->config.evergreen.max_hw_contexts = 8;
                rdev->config.evergreen.sq_num_cf_insts = 2;

                rdev->config.evergreen.sc_prim_fifo_size = 0x100;
                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
                break;
        case CHIP_CEDAR:
        default:
                rdev->config.evergreen.num_ses = 1;
                rdev->config.evergreen.max_pipes = 2;
                rdev->config.evergreen.max_tile_pipes = 2;
                rdev->config.evergreen.max_simds = 2;
                rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
                rdev->config.evergreen.max_gprs = 256;
                rdev->config.evergreen.max_threads = 192;
                rdev->config.evergreen.max_gs_threads = 16;
                rdev->config.evergreen.max_stack_entries = 256;
                rdev->config.evergreen.sx_num_of_sets = 4;
                rdev->config.evergreen.sx_max_export_size = 128;
                rdev->config.evergreen.sx_max_export_pos_size = 32;
                rdev->config.evergreen.sx_max_export_smx_size = 96;
                rdev->config.evergreen.max_hw_contexts = 4;
                rdev->config.evergreen.sq_num_cf_insts = 1;

                rdev->config.evergreen.sc_prim_fifo_size = 0x40;
                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
                break;
        }

        /* Initialize HDP */
        for (i = 0, j = 0; i < 32; i++, j += 0x18) {
                WREG32((0x2c14 + j), 0x00000000);
                WREG32((0x2c18 + j), 0x00000000);
                WREG32((0x2c1c + j), 0x00000000);
                WREG32((0x2c20 + j), 0x00000000);
                WREG32((0x2c24 + j), 0x00000000);
        }

        WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));

        cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & ~2;

        cc_gc_shader_pipe_config |=
                INACTIVE_QD_PIPES((EVERGREEN_MAX_PIPES_MASK << rdev->config.evergreen.max_pipes)
                                  & EVERGREEN_MAX_PIPES_MASK);
        cc_gc_shader_pipe_config |=
                INACTIVE_SIMDS((EVERGREEN_MAX_SIMDS_MASK << rdev->config.evergreen.max_simds)
                               & EVERGREEN_MAX_SIMDS_MASK);

        cc_rb_backend_disable =
                BACKEND_DISABLE((EVERGREEN_MAX_BACKENDS_MASK << rdev->config.evergreen.max_backends)
                                & EVERGREEN_MAX_BACKENDS_MASK);

        mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
        mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);

        switch (rdev->config.evergreen.max_tile_pipes) {
        case 1:
        default:
                gb_addr_config |= NUM_PIPES(0);
                break;
        case 2:
                gb_addr_config |= NUM_PIPES(1);
                break;
        case 4:
                gb_addr_config |= NUM_PIPES(2);
                break;
        case 8:
                gb_addr_config |= NUM_PIPES(3);
                break;
        }

        gb_addr_config |= PIPE_INTERLEAVE_SIZE((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
        gb_addr_config |= BANK_INTERLEAVE_SIZE(0);
        gb_addr_config |= NUM_SHADER_ENGINES(rdev->config.evergreen.num_ses - 1);
        gb_addr_config |= SHADER_ENGINE_TILE_SIZE(1);
        gb_addr_config |= NUM_GPUS(0); /* Hemlock? */
        gb_addr_config |= MULTI_GPU_TILE_SIZE(2);

        if (((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT) > 2)
                gb_addr_config |= ROW_SIZE(2);
        else
                gb_addr_config |= ROW_SIZE((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT);

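        /* Some SKUs fuse off render backends; for the PCI ids handled
         * below, read the backend harvest configuration from the efuse
         * straps instead of computing a default map.
         */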
977         if (rdev->ddev->pdev->device == 0x689e) {
978                 u32 efuse_straps_4;
979                 u32 efuse_straps_3;
980                 u8 efuse_box_bit_131_124;
981
982                 WREG32(RCU_IND_INDEX, 0x204);
983                 efuse_straps_4 = RREG32(RCU_IND_DATA);
984                 WREG32(RCU_IND_INDEX, 0x203);
985                 efuse_straps_3 = RREG32(RCU_IND_DATA);
986                 efuse_box_bit_131_124 = (u8)(((efuse_straps_4 & 0xf) << 4) | ((efuse_straps_3 & 0xf0000000) >> 28));
987
988                 switch(efuse_box_bit_131_124) {
989                 case 0x00:
990                         gb_backend_map = 0x76543210;
991                         break;
992                 case 0x55:
993                         gb_backend_map = 0x77553311;
994                         break;
995                 case 0x56:
996                         gb_backend_map = 0x77553300;
997                         break;
998                 case 0x59:
999                         gb_backend_map = 0x77552211;
1000                         break;
1001                 case 0x66:
1002                         gb_backend_map = 0x77443300;
1003                         break;
1004                 case 0x99:
1005                         gb_backend_map = 0x66552211;
1006                         break;
1007                 case 0x5a:
1008                         gb_backend_map = 0x77552200;
1009                         break;
1010                 case 0xaa:
1011                         gb_backend_map = 0x66442200;
1012                         break;
1013                 case 0x95:
1014                         gb_backend_map = 0x66553311;
1015                         break;
1016                 default:
1017                         DRM_ERROR("bad backend map, using default\n");
1018                         gb_backend_map =
1019                                 evergreen_get_tile_pipe_to_backend_map(rdev,
1020                                                                        rdev->config.evergreen.max_tile_pipes,
1021                                                                        rdev->config.evergreen.max_backends,
1022                                                                        ((EVERGREEN_MAX_BACKENDS_MASK <<
1023                                                                    rdev->config.evergreen.max_backends) &
1024                                                                         EVERGREEN_MAX_BACKENDS_MASK));
1025                         break;
1026                 }
1027         } else if (rdev->ddev->pdev->device == 0x68b9) {
1028                 u32 efuse_straps_3;
1029                 u8 efuse_box_bit_127_124;
1030
1031                 WREG32(RCU_IND_INDEX, 0x203);
1032                 efuse_straps_3 = RREG32(RCU_IND_DATA);
1033                 efuse_box_bit_127_124 = (u8)(efuse_straps_3 & 0xF0000000) >> 28;
1034
1035                 switch(efuse_box_bit_127_124) {
1036                 case 0x0:
1037                         gb_backend_map = 0x00003210;
1038                         break;
1039                 case 0x5:
1040                 case 0x6:
1041                 case 0x9:
1042                 case 0xa:
1043                         gb_backend_map = 0x00003311;
1044                         break;
1045                 default:
1046                         DRM_ERROR("bad backend map, using default\n");
1047                         gb_backend_map =
1048                                 evergreen_get_tile_pipe_to_backend_map(rdev,
1049                                                                        rdev->config.evergreen.max_tile_pipes,
1050                                                                        rdev->config.evergreen.max_backends,
1051                                                                        ((EVERGREEN_MAX_BACKENDS_MASK <<
1052                                                                    rdev->config.evergreen.max_backends) &
1053                                                                         EVERGREEN_MAX_BACKENDS_MASK));
1054                         break;
1055                 }
1056         } else
1057                 gb_backend_map =
1058                         evergreen_get_tile_pipe_to_backend_map(rdev,
1059                                                                rdev->config.evergreen.max_tile_pipes,
1060                                                                rdev->config.evergreen.max_backends,
1061                                                                ((EVERGREEN_MAX_BACKENDS_MASK <<
1062                                                                  rdev->config.evergreen.max_backends) &
1063                                                                 EVERGREEN_MAX_BACKENDS_MASK));
1064
1065         WREG32(GB_BACKEND_MAP, gb_backend_map);
1066         WREG32(GB_ADDR_CONFIG, gb_addr_config);
1067         WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
1068         WREG32(HDP_ADDR_CONFIG, gb_addr_config);
1069
1070         num_shader_engines = ((RREG32(GB_ADDR_CONFIG) & NUM_SHADER_ENGINES(3)) >> 12) + 1;
1071         grbm_gfx_index = INSTANCE_BROADCAST_WRITES;
1072
1073         for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
1074                 u32 rb = cc_rb_backend_disable | (0xf0 << 16);
1075                 u32 sp = cc_gc_shader_pipe_config;
1076                 u32 gfx = grbm_gfx_index | SE_INDEX(i);
1077
1078                 if (i == num_shader_engines) {
1079                         rb |= BACKEND_DISABLE(EVERGREEN_MAX_BACKENDS_MASK);
1080                         sp |= INACTIVE_SIMDS(EVERGREEN_MAX_SIMDS_MASK);
1081                 }
1082
1083                 WREG32(GRBM_GFX_INDEX, gfx);
1084                 WREG32(RLC_GFX_INDEX, gfx);
1085
1086                 WREG32(CC_RB_BACKEND_DISABLE, rb);
1087                 WREG32(CC_SYS_RB_BACKEND_DISABLE, rb);
1088                 WREG32(GC_USER_RB_BACKEND_DISABLE, rb);
1089                 WREG32(CC_GC_SHADER_PIPE_CONFIG, sp);
1090         }
1091
1092         grbm_gfx_index |= SE_BROADCAST_WRITES;
1093         WREG32(GRBM_GFX_INDEX, grbm_gfx_index);
1094         WREG32(RLC_GFX_INDEX, grbm_gfx_index);
1095
1096         WREG32(CGTS_SYS_TCC_DISABLE, 0);
1097         WREG32(CGTS_TCC_DISABLE, 0);
1098         WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
1099         WREG32(CGTS_USER_TCC_DISABLE, 0);
1100
1101         /* set HW defaults for 3D engine */
1102         WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
1103                                      ROQ_IB2_START(0x2b)));
1104
1105         WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
1106
1107         WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
1108                              SYNC_GRADIENT |
1109                              SYNC_WALKER |
1110                              SYNC_ALIGNER));
1111
1112         sx_debug_1 = RREG32(SX_DEBUG_1);
1113         sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
1114         WREG32(SX_DEBUG_1, sx_debug_1);
1115
1116
1117         smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
1118         smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
1119         smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
1120         WREG32(SMX_DC_CTL0, smx_dc_ctl0);
1121
1122         WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
1123                                         POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
1124                                         SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
1125
1126         WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
1127                                  SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
1128                                  SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
1129
1130         WREG32(VGT_NUM_INSTANCES, 1);
1131         WREG32(SPI_CONFIG_CNTL, 0);
1132         WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
1133         WREG32(CP_PERFMON_CNTL, 0);
1134
1135         WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
1136                                   FETCH_FIFO_HIWATER(0x4) |
1137                                   DONE_FIFO_HIWATER(0xe0) |
1138                                   ALU_UPDATE_FIFO_HIWATER(0x8)));
1139
1140         sq_config = RREG32(SQ_CONFIG);
1141         sq_config &= ~(PS_PRIO(3) |
1142                        VS_PRIO(3) |
1143                        GS_PRIO(3) |
1144                        ES_PRIO(3));
1145         sq_config |= (VC_ENABLE |
1146                       EXPORT_SRC_C |
1147                       PS_PRIO(0) |
1148                       VS_PRIO(1) |
1149                       GS_PRIO(2) |
1150                       ES_PRIO(3));
1151
1152         if (rdev->family == CHIP_CEDAR)
1153                 /* no vertex cache */
1154                 sq_config &= ~VC_ENABLE;
1155
1156         sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
1157
1158         sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
1159         sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
1160         sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
1161         sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
1162         sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
1163         sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
1164         sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
1165
1166         if (rdev->family == CHIP_CEDAR)
1167                 ps_thread_count = 96;
1168         else
1169                 ps_thread_count = 128;
1170
1171         sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
1172         sq_thread_resource_mgmt |= NUM_VS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1173         sq_thread_resource_mgmt |= NUM_GS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1174         sq_thread_resource_mgmt |= NUM_ES_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1175         sq_thread_resource_mgmt_2 = NUM_HS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1176         sq_thread_resource_mgmt_2 |= NUM_LS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1177
1178         sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1179         sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1180         sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1181         sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1182         sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1183         sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1184
1185         WREG32(SQ_CONFIG, sq_config);
1186         WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
1187         WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
1188         WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
1189         WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
1190         WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
1191         WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
1192         WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
1193         WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
1194         WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
1195         WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
1196
1197         WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
1198                                           FORCE_EOV_MAX_REZ_CNT(255)));
1199
1200         if (rdev->family == CHIP_CEDAR)
1201                 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
1202         else
1203                 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
1204         vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
1205         WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
1206
1207         WREG32(VGT_GS_VERTEX_REUSE, 16);
1208         WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
1209
1210         WREG32(CB_PERF_CTR0_SEL_0, 0);
1211         WREG32(CB_PERF_CTR0_SEL_1, 0);
1212         WREG32(CB_PERF_CTR1_SEL_0, 0);
1213         WREG32(CB_PERF_CTR1_SEL_1, 0);
1214         WREG32(CB_PERF_CTR2_SEL_0, 0);
1215         WREG32(CB_PERF_CTR2_SEL_1, 0);
1216         WREG32(CB_PERF_CTR3_SEL_0, 0);
1217         WREG32(CB_PERF_CTR3_SEL_1, 0);
1218
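        /* write HDP_HOST_PATH_CNTL back unchanged; the same read/write
         * idiom appears on r600/r700, seemingly just to latch the
         * power-on default */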
1219         hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
1220         WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
1221
1222         WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
1223
1224         udelay(50);
1225
1226 }
1227
1228 int evergreen_mc_init(struct radeon_device *rdev)
1229 {
1230         u32 tmp;
1231         int chansize, numchan;
1232
1233         /* Get VRAM information */
1234         rdev->mc.vram_is_ddr = true;
1235         tmp = RREG32(MC_ARB_RAMCFG);
1236         if (tmp & CHANSIZE_OVERRIDE) {
1237                 chansize = 16;
1238         } else if (tmp & CHANSIZE_MASK) {
1239                 chansize = 64;
1240         } else {
1241                 chansize = 32;
1242         }
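        /* NOOFCHAN encodes the channel count as a power of two
         * (0 -> 1, 1 -> 2, 2 -> 4, 3 -> 8); the effective bus width is
         * channels * channel size, e.g. 4 channels * 32 bits = 128 bits.
         */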
1243         tmp = RREG32(MC_SHARED_CHMAP);
1244         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1245         case 0:
1246         default:
1247                 numchan = 1;
1248                 break;
1249         case 1:
1250                 numchan = 2;
1251                 break;
1252         case 2:
1253                 numchan = 4;
1254                 break;
1255         case 3:
1256                 numchan = 8;
1257                 break;
1258         }
1259         rdev->mc.vram_width = numchan * chansize;
1260         /* could the aperture size be reported as 0? */
1261         rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
1262         rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
1263         /* Setup GPU memory space */
1264         /* size in MB on evergreen */
1265         rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
1266         rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
1267         rdev->mc.visible_vram_size = rdev->mc.aper_size;
1268         /* FIXME remove this once we support unmappable VRAM */
1269         if (rdev->mc.mc_vram_size > rdev->mc.aper_size) {
1270                 rdev->mc.mc_vram_size = rdev->mc.aper_size;
1271                 rdev->mc.real_vram_size = rdev->mc.aper_size;
1272         }
1273         r600_vram_gtt_location(rdev, &rdev->mc);
1274         radeon_update_bandwidth_info(rdev);
1275
1276         return 0;
1277 }
1278
1279 bool evergreen_gpu_is_lockup(struct radeon_device *rdev)
1280 {
1281         /* FIXME: implement for evergreen */
1282         return false;
1283 }
1284
1285 static int evergreen_gpu_soft_reset(struct radeon_device *rdev)
1286 {
1287         struct evergreen_mc_save save;
1288         u32 srbm_reset = 0;
1289         u32 grbm_reset = 0;
1290
1291         dev_info(rdev->dev, "GPU softreset\n");
1292         dev_info(rdev->dev, "  GRBM_STATUS=0x%08X\n",
1293                 RREG32(GRBM_STATUS));
1294         dev_info(rdev->dev, "  GRBM_STATUS_SE0=0x%08X\n",
1295                 RREG32(GRBM_STATUS_SE0));
1296         dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
1297                 RREG32(GRBM_STATUS_SE1));
1298         dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
1299                 RREG32(SRBM_STATUS));
1300         evergreen_mc_stop(rdev, &save);
1301         if (evergreen_mc_wait_for_idle(rdev)) {
1302                 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
1303         }
1304         /* Disable CP parsing/prefetching */
1305         WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
1306
1307         /* reset all the gfx blocks */
1308         grbm_reset = (SOFT_RESET_CP |
1309                       SOFT_RESET_CB |
1310                       SOFT_RESET_DB |
1311                       SOFT_RESET_PA |
1312                       SOFT_RESET_SC |
1313                       SOFT_RESET_SPI |
1314                       SOFT_RESET_SH |
1315                       SOFT_RESET_SX |
1316                       SOFT_RESET_TC |
1317                       SOFT_RESET_TA |
1318                       SOFT_RESET_VC |
1319                       SOFT_RESET_VGT);
1320
1321         dev_info(rdev->dev, "  GRBM_SOFT_RESET=0x%08X\n", grbm_reset);
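        /* assert the reset bits, read back so the write posts, give the
         * blocks time to reset, then release them the same way */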
1322         WREG32(GRBM_SOFT_RESET, grbm_reset);
1323         (void)RREG32(GRBM_SOFT_RESET);
1324         udelay(50);
1325         WREG32(GRBM_SOFT_RESET, 0);
1326         (void)RREG32(GRBM_SOFT_RESET);
1327
1328         /* reset all the system blocks */
1329         srbm_reset = SRBM_SOFT_RESET_ALL_MASK;
1330
1331         dev_info(rdev->dev, "  SRBM_SOFT_RESET=0x%08X\n", srbm_reset);
1332         WREG32(SRBM_SOFT_RESET, srbm_reset);
1333         (void)RREG32(SRBM_SOFT_RESET);
1334         udelay(50);
1335         WREG32(SRBM_SOFT_RESET, 0);
1336         (void)RREG32(SRBM_SOFT_RESET);
1337         /* Wait a little for things to settle down */
1338         udelay(50);
1339         dev_info(rdev->dev, "  GRBM_STATUS=0x%08X\n",
1340                 RREG32(GRBM_STATUS));
1341         dev_info(rdev->dev, "  GRBM_STATUS_SE0=0x%08X\n",
1342                 RREG32(GRBM_STATUS_SE0));
1343         dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
1344                 RREG32(GRBM_STATUS_SE1));
1345         dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
1346                 RREG32(SRBM_STATUS));
1347         /* After reset we need to reinit the asic as the GPU often ends
1348          * up in an incoherent state.
1349          */
1350         atom_asic_init(rdev->mode_info.atom_context);
1351         evergreen_mc_resume(rdev, &save);
1352         return 0;
1353 }
1354
1355 int evergreen_asic_reset(struct radeon_device *rdev)
1356 {
1357         return evergreen_gpu_soft_reset(rdev);
1358 }
1359
1360 /* Interrupts */
1361
1362 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
1363 {
1364         switch (crtc) {
1365         case 0:
1366                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC0_REGISTER_OFFSET);
1367         case 1:
1368                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC1_REGISTER_OFFSET);
1369         case 2:
1370                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC2_REGISTER_OFFSET);
1371         case 3:
1372                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC3_REGISTER_OFFSET);
1373         case 4:
1374                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC4_REGISTER_OFFSET);
1375         case 5:
1376                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC5_REGISTER_OFFSET);
1377         default:
1378                 return 0;
1379         }
1380 }
1381
1382 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
1383 {
1384         u32 tmp;
1385
1386         WREG32(CP_INT_CNTL, 0);
1387         WREG32(GRBM_INT_CNTL, 0);
1388         WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
1389         WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
1390         WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
1391         WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
1392         WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
1393         WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
1394
1395         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
1396         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
1397         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
1398         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
1399         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
1400         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
1401
1402         WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
1403         WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
1404
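        /* mask the HPD interrupts but keep the polarity bit, so the
         * sense programmed by evergreen_hpd_set_polarity() survives */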
1405         tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1406         WREG32(DC_HPD1_INT_CONTROL, tmp);
1407         tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1408         WREG32(DC_HPD2_INT_CONTROL, tmp);
1409         tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1410         WREG32(DC_HPD3_INT_CONTROL, tmp);
1411         tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1412         WREG32(DC_HPD4_INT_CONTROL, tmp);
1413         tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1414         WREG32(DC_HPD5_INT_CONTROL, tmp);
1415         tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1416         WREG32(DC_HPD6_INT_CONTROL, tmp);
1417
1418 }
1419
1420 int evergreen_irq_set(struct radeon_device *rdev)
1421 {
1422         u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
1423         u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
1424         u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
1425
1426         if (!rdev->irq.installed) {
1427                 WARN(1, "Can't enable IRQ/MSI because no handler is installed.\n");
1428                 return -EINVAL;
1429         }
1430         /* don't enable anything if the ih is disabled */
1431         if (!rdev->ih.enabled) {
1432                 r600_disable_interrupts(rdev);
1433                 /* force the active interrupt state to all disabled */
1434                 evergreen_disable_interrupt_state(rdev);
1435                 return 0;
1436         }
1437
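        /* read-modify-write: clear only the enable bit so the polarity
         * and other control bits already programmed are preserved */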
1438         hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
1439         hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
1440         hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
1441         hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
1442         hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
1443         hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
1444
1445         if (rdev->irq.sw_int) {
1446                 DRM_DEBUG("evergreen_irq_set: sw int\n");
1447                 cp_int_cntl |= RB_INT_ENABLE;
1448         }
1449         if (rdev->irq.crtc_vblank_int[0]) {
1450                 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
1451                 crtc1 |= VBLANK_INT_MASK;
1452         }
1453         if (rdev->irq.crtc_vblank_int[1]) {
1454                 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
1455                 crtc2 |= VBLANK_INT_MASK;
1456         }
1457         if (rdev->irq.crtc_vblank_int[2]) {
1458                 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
1459                 crtc3 |= VBLANK_INT_MASK;
1460         }
1461         if (rdev->irq.crtc_vblank_int[3]) {
1462                 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
1463                 crtc4 |= VBLANK_INT_MASK;
1464         }
1465         if (rdev->irq.crtc_vblank_int[4]) {
1466                 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
1467                 crtc5 |= VBLANK_INT_MASK;
1468         }
1469         if (rdev->irq.crtc_vblank_int[5]) {
1470                 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
1471                 crtc6 |= VBLANK_INT_MASK;
1472         }
1473         if (rdev->irq.hpd[0]) {
1474                 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
1475                 hpd1 |= DC_HPDx_INT_EN;
1476         }
1477         if (rdev->irq.hpd[1]) {
1478                 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
1479                 hpd2 |= DC_HPDx_INT_EN;
1480         }
1481         if (rdev->irq.hpd[2]) {
1482                 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
1483                 hpd3 |= DC_HPDx_INT_EN;
1484         }
1485         if (rdev->irq.hpd[3]) {
1486                 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
1487                 hpd4 |= DC_HPDx_INT_EN;
1488         }
1489         if (rdev->irq.hpd[4]) {
1490                 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
1491                 hpd5 |= DC_HPDx_INT_EN;
1492         }
1493         if (rdev->irq.hpd[5]) {
1494                 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
1495                 hpd6 |= DC_HPDx_INT_EN;
1496         }
1497
1498         WREG32(CP_INT_CNTL, cp_int_cntl);
1499
1500         WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
1501         WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
1502         WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
1503         WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
1504         WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
1505         WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
1506
1507         WREG32(DC_HPD1_INT_CONTROL, hpd1);
1508         WREG32(DC_HPD2_INT_CONTROL, hpd2);
1509         WREG32(DC_HPD3_INT_CONTROL, hpd3);
1510         WREG32(DC_HPD4_INT_CONTROL, hpd4);
1511         WREG32(DC_HPD5_INT_CONTROL, hpd5);
1512         WREG32(DC_HPD6_INT_CONTROL, hpd6);
1513
1514         return 0;
1515 }
1516
1517 static inline void evergreen_irq_ack(struct radeon_device *rdev,
1518                                      u32 *disp_int,
1519                                      u32 *disp_int_cont,
1520                                      u32 *disp_int_cont2,
1521                                      u32 *disp_int_cont3,
1522                                      u32 *disp_int_cont4,
1523                                      u32 *disp_int_cont5)
1524 {
1525         u32 tmp;
1526
1527         *disp_int = RREG32(DISP_INTERRUPT_STATUS);
1528         *disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
1529         *disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
1530         *disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
1531         *disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
1532         *disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
1533
1534         if (*disp_int & LB_D1_VBLANK_INTERRUPT)
1535                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
1536         if (*disp_int & LB_D1_VLINE_INTERRUPT)
1537                 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
1538
1539         if (*disp_int_cont & LB_D2_VBLANK_INTERRUPT)
1540                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
1541         if (*disp_int_cont & LB_D2_VLINE_INTERRUPT)
1542                 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
1543
1544         if (*disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
1545                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
1546         if (*disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
1547                 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
1548
1549         if (*disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
1550                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
1551         if (*disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
1552                 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
1553
1554         if (*disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
1555                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
1556         if (*disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
1557                 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
1558
1559         if (*disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
1560                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
1561         if (*disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
1562                 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
1563
1564         if (*disp_int & DC_HPD1_INTERRUPT) {
1565                 tmp = RREG32(DC_HPD1_INT_CONTROL);
1566                 tmp |= DC_HPDx_INT_ACK;
1567                 WREG32(DC_HPD1_INT_CONTROL, tmp);
1568         }
1569         if (*disp_int_cont & DC_HPD2_INTERRUPT) {
1570                 tmp = RREG32(DC_HPD2_INT_CONTROL);
1571                 tmp |= DC_HPDx_INT_ACK;
1572                 WREG32(DC_HPD2_INT_CONTROL, tmp);
1573         }
1574         if (*disp_int_cont2 & DC_HPD3_INTERRUPT) {
1575                 tmp = RREG32(DC_HPD3_INT_CONTROL);
1576                 tmp |= DC_HPDx_INT_ACK;
1577                 WREG32(DC_HPD3_INT_CONTROL, tmp);
1578         }
1579         if (*disp_int_cont3 & DC_HPD4_INTERRUPT) {
1580                 tmp = RREG32(DC_HPD4_INT_CONTROL);
1581                 tmp |= DC_HPDx_INT_ACK;
1582                 WREG32(DC_HPD4_INT_CONTROL, tmp);
1583         }
1584         if (*disp_int_cont4 & DC_HPD5_INTERRUPT) {
1585                 tmp = RREG32(DC_HPD5_INT_CONTROL);
1586                 tmp |= DC_HPDx_INT_ACK;
1587                 WREG32(DC_HPD5_INT_CONTROL, tmp);
1588         }
1589         if (*disp_int_cont5 & DC_HPD6_INTERRUPT) {
1590                 tmp = RREG32(DC_HPD6_INT_CONTROL);
1591                 tmp |= DC_HPDx_INT_ACK;
1592                 WREG32(DC_HPD6_INT_CONTROL, tmp);
1593         }
1594 }
1595
1596 void evergreen_irq_disable(struct radeon_device *rdev)
1597 {
1598         u32 disp_int, disp_int_cont, disp_int_cont2;
1599         u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
1600
1601         r600_disable_interrupts(rdev);
1602         /* Wait and acknowledge irq */
1603         mdelay(1);
1604         evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
1605                           &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
1606         evergreen_disable_interrupt_state(rdev);
1607 }
1608
1609 static void evergreen_irq_suspend(struct radeon_device *rdev)
1610 {
1611         evergreen_irq_disable(rdev);
1612         r600_rlc_stop(rdev);
1613 }
1614
1615 static inline u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
1616 {
1617         u32 wptr, tmp;
1618
1619         /* XXX use writeback */
1620         wptr = RREG32(IH_RB_WPTR);
1621
1622         if (wptr & RB_OVERFLOW) {
1623                 /* When a ring buffer overflow happens, start parsing interrupts
1624                  * from the last vector not yet overwritten (wptr + 16). Hopefully
1625                  * this should allow us to catch up.
1626                  */
1627                 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
1628                         wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
1629                 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
1630                 tmp = RREG32(IH_RB_CNTL);
1631                 tmp |= IH_WPTR_OVERFLOW_CLEAR;
1632                 WREG32(IH_RB_CNTL, tmp);
1633         }
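        /* mask wptr so the returned offset always lies within the ring */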
1634         return (wptr & rdev->ih.ptr_mask);
1635 }
1636
1637 int evergreen_irq_process(struct radeon_device *rdev)
1638 {
1639         u32 wptr = evergreen_get_ih_wptr(rdev);
1640         u32 rptr = rdev->ih.rptr;
1641         u32 src_id, src_data;
1642         u32 ring_index;
1643         u32 disp_int, disp_int_cont, disp_int_cont2;
1644         u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
1645         unsigned long flags;
1646         bool queue_hotplug = false;
1647
1648         DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
1649         if (!rdev->ih.enabled)
1650                 return IRQ_NONE;
1651
1652         spin_lock_irqsave(&rdev->ih.lock, flags);
1653
1654         if (rptr == wptr) {
1655                 spin_unlock_irqrestore(&rdev->ih.lock, flags);
1656                 return IRQ_NONE;
1657         }
1658         if (rdev->shutdown) {
1659                 spin_unlock_irqrestore(&rdev->ih.lock, flags);
1660                 return IRQ_NONE;
1661         }
1662
1663 restart_ih:
1664         /* display interrupts */
1665         evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
1666                           &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
1667
1668         rdev->ih.wptr = wptr;
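        /* each IH vector is 16 bytes (4 dwords): dword 0 carries the
         * source id and dword 1 the source-specific data */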
1669         while (rptr != wptr) {
1670                 /* wptr/rptr are in bytes! */
1671                 ring_index = rptr / 4;
1672                 src_id =  rdev->ih.ring[ring_index] & 0xff;
1673                 src_data = rdev->ih.ring[ring_index + 1] & 0xfffffff;
1674
1675                 switch (src_id) {
1676                 case 1: /* D1 vblank/vline */
1677                         switch (src_data) {
1678                         case 0: /* D1 vblank */
1679                                 if (disp_int & LB_D1_VBLANK_INTERRUPT) {
1680                                         drm_handle_vblank(rdev->ddev, 0);
1681                                         wake_up(&rdev->irq.vblank_queue);
1682                                         disp_int &= ~LB_D1_VBLANK_INTERRUPT;
1683                                         DRM_DEBUG("IH: D1 vblank\n");
1684                                 }
1685                                 break;
1686                         case 1: /* D1 vline */
1687                                 if (disp_int & LB_D1_VLINE_INTERRUPT) {
1688                                         disp_int &= ~LB_D1_VLINE_INTERRUPT;
1689                                         DRM_DEBUG("IH: D1 vline\n");
1690                                 }
1691                                 break;
1692                         default:
1693                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1694                                 break;
1695                         }
1696                         break;
1697                 case 2: /* D2 vblank/vline */
1698                         switch (src_data) {
1699                         case 0: /* D2 vblank */
1700                                 if (disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
1701                                         drm_handle_vblank(rdev->ddev, 1);
1702                                         wake_up(&rdev->irq.vblank_queue);
1703                                         disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
1704                                         DRM_DEBUG("IH: D2 vblank\n");
1705                                 }
1706                                 break;
1707                         case 1: /* D2 vline */
1708                                 if (disp_int_cont & LB_D2_VLINE_INTERRUPT) {
1709                                         disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
1710                                         DRM_DEBUG("IH: D2 vline\n");
1711                                 }
1712                                 break;
1713                         default:
1714                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1715                                 break;
1716                         }
1717                         break;
1718                 case 3: /* D3 vblank/vline */
1719                         switch (src_data) {
1720                         case 0: /* D3 vblank */
1721                                 if (disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
1722                                         drm_handle_vblank(rdev->ddev, 2);
1723                                         wake_up(&rdev->irq.vblank_queue);
1724                                         disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
1725                                         DRM_DEBUG("IH: D3 vblank\n");
1726                                 }
1727                                 break;
1728                         case 1: /* D3 vline */
1729                                 if (disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
1730                                         disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
1731                                         DRM_DEBUG("IH: D3 vline\n");
1732                                 }
1733                                 break;
1734                         default:
1735                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1736                                 break;
1737                         }
1738                         break;
1739                 case 4: /* D4 vblank/vline */
1740                         switch (src_data) {
1741                         case 0: /* D4 vblank */
1742                                 if (disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
1743                                         drm_handle_vblank(rdev->ddev, 3);
1744                                         wake_up(&rdev->irq.vblank_queue);
1745                                         disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
1746                                         DRM_DEBUG("IH: D4 vblank\n");
1747                                 }
1748                                 break;
1749                         case 1: /* D4 vline */
1750                                 if (disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
1751                                         disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
1752                                         DRM_DEBUG("IH: D4 vline\n");
1753                                 }
1754                                 break;
1755                         default:
1756                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1757                                 break;
1758                         }
1759                         break;
1760                 case 5: /* D5 vblank/vline */
1761                         switch (src_data) {
1762                         case 0: /* D5 vblank */
1763                                 if (disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
1764                                         drm_handle_vblank(rdev->ddev, 4);
1765                                         wake_up(&rdev->irq.vblank_queue);
1766                                         disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
1767                                         DRM_DEBUG("IH: D5 vblank\n");
1768                                 }
1769                                 break;
1770                         case 1: /* D5 vline */
1771                                 if (disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
1772                                         disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
1773                                         DRM_DEBUG("IH: D5 vline\n");
1774                                 }
1775                                 break;
1776                         default:
1777                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1778                                 break;
1779                         }
1780                         break;
1781                 case 6: /* D6 vblank/vline */
1782                         switch (src_data) {
1783                         case 0: /* D6 vblank */
1784                                 if (disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
1785                                         drm_handle_vblank(rdev->ddev, 5);
1786                                         wake_up(&rdev->irq.vblank_queue);
1787                                         disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
1788                                         DRM_DEBUG("IH: D6 vblank\n");
1789                                 }
1790                                 break;
1791                         case 1: /* D6 vline */
1792                                 if (disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
1793                                         disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
1794                                         DRM_DEBUG("IH: D6 vline\n");
1795                                 }
1796                                 break;
1797                         default:
1798                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1799                                 break;
1800                         }
1801                         break;
1802                 case 42: /* HPD hotplug */
1803                         switch (src_data) {
1804                         case 0:
1805                                 if (disp_int & DC_HPD1_INTERRUPT) {
1806                                         disp_int &= ~DC_HPD1_INTERRUPT;
1807                                         queue_hotplug = true;
1808                                         DRM_DEBUG("IH: HPD1\n");
1809                                 }
1810                                 break;
1811                         case 1:
1812                                 if (disp_int_cont & DC_HPD2_INTERRUPT) {
1813                                         disp_int_cont &= ~DC_HPD2_INTERRUPT;
1814                                         queue_hotplug = true;
1815                                         DRM_DEBUG("IH: HPD2\n");
1816                                 }
1817                                 break;
1818                         case 2:
1819                                 if (disp_int_cont2 & DC_HPD3_INTERRUPT) {
1820                                         disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
1821                                         queue_hotplug = true;
1822                                         DRM_DEBUG("IH: HPD3\n");
1823                                 }
1824                                 break;
1825                         case 3:
1826                                 if (disp_int_cont3 & DC_HPD4_INTERRUPT) {
1827                                         disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
1828                                         queue_hotplug = true;
1829                                         DRM_DEBUG("IH: HPD4\n");
1830                                 }
1831                                 break;
1832                         case 4:
1833                                 if (disp_int_cont4 & DC_HPD5_INTERRUPT) {
1834                                         disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
1835                                         queue_hotplug = true;
1836                                         DRM_DEBUG("IH: HPD5\n");
1837                                 }
1838                                 break;
1839                         case 5:
1840                                 if (disp_int_cont5 & DC_HPD6_INTERRUPT) {
1841                                         disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
1842                                         queue_hotplug = true;
1843                                         DRM_DEBUG("IH: HPD6\n");
1844                                 }
1845                                 break;
1846                         default:
1847                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1848                                 break;
1849                         }
1850                         break;
1851                 case 176: /* CP_INT in ring buffer */
1852                 case 177: /* CP_INT in IB1 */
1853                 case 178: /* CP_INT in IB2 */
1854                         DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
1855                         radeon_fence_process(rdev);
1856                         break;
1857                 case 181: /* CP EOP event */
1858                         DRM_DEBUG("IH: CP EOP\n");
1859                         break;
1860                 default:
1861                         DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1862                         break;
1863                 }
1864
1865                 /* wptr/rptr are in bytes! */
1866                 rptr += 16;
1867                 rptr &= rdev->ih.ptr_mask;
1868         }
1869         /* make sure wptr hasn't changed while processing */
1870         wptr = evergreen_get_ih_wptr(rdev);
1871         if (wptr != rdev->ih.wptr)
1872                 goto restart_ih;
1873         if (queue_hotplug)
1874                 queue_work(rdev->wq, &rdev->hotplug_work);
1875         rdev->ih.rptr = rptr;
1876         WREG32(IH_RB_RPTR, rdev->ih.rptr);
1877         spin_unlock_irqrestore(&rdev->ih.lock, flags);
1878         return IRQ_HANDLED;
1879 }
1880
1881 static int evergreen_startup(struct radeon_device *rdev)
1882 {
1883         int r;
1884
1885         if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
1886                 r = r600_init_microcode(rdev);
1887                 if (r) {
1888                         DRM_ERROR("Failed to load firmware!\n");
1889                         return r;
1890                 }
1891         }
1892
1893         evergreen_mc_program(rdev);
1894         if (rdev->flags & RADEON_IS_AGP) {
1895                 evergreen_agp_enable(rdev);
1896         } else {
1897                 r = evergreen_pcie_gart_enable(rdev);
1898                 if (r)
1899                         return r;
1900         }
1901         evergreen_gpu_init(rdev);
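        /* the blit setup below is compiled out; accelerated copies do
         * not appear to be wired up for evergreen yet */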
1902 #if 0
1903         if (!rdev->r600_blit.shader_obj) {
1904                 r = r600_blit_init(rdev);
1905                 if (r) {
1906                         DRM_ERROR("radeon: failed blitter (%d).\n", r);
1907                         return r;
1908                 }
1909         }
1910
1911         r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
1912         if (unlikely(r != 0))
1913                 return r;
1914         r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
1915                         &rdev->r600_blit.shader_gpu_addr);
1916         radeon_bo_unreserve(rdev->r600_blit.shader_obj);
1917         if (r) {
1918                 DRM_ERROR("failed to pin blit object %d\n", r);
1919                 return r;
1920         }
1921 #endif
1922
1923         /* Enable IRQ */
1924         r = r600_irq_init(rdev);
1925         if (r) {
1926                 DRM_ERROR("radeon: IH init failed (%d).\n", r);
1927                 radeon_irq_kms_fini(rdev);
1928                 return r;
1929         }
1930         evergreen_irq_set(rdev);
1931
1932         r = radeon_ring_init(rdev, rdev->cp.ring_size);
1933         if (r)
1934                 return r;
1935         r = evergreen_cp_load_microcode(rdev);
1936         if (r)
1937                 return r;
1938         r = evergreen_cp_resume(rdev);
1939         if (r)
1940                 return r;
1941         /* the write-back buffer is not vital, so don't worry about failure */
1942         r600_wb_enable(rdev);
1943
1944         return 0;
1945 }
1946
1947 int evergreen_resume(struct radeon_device *rdev)
1948 {
1949         int r;
1950
1951         /* Do not reset the GPU before posting; on rv770 hw, unlike r500 hw,
1952          * posting will perform the necessary tasks to bring the GPU back
1953          * into good shape.
1954          */
1955         /* post card */
1956         atom_asic_init(rdev->mode_info.atom_context);
1957         /* Initialize clocks */
1958         r = radeon_clocks_init(rdev);
1959         if (r) {
1960                 return r;
1961         }
1962
1963         r = evergreen_startup(rdev);
1964         if (r) {
1965                 DRM_ERROR("evergreen startup failed on resume\n");
1966                 return r;
1967         }
1968
1969         r = r600_ib_test(rdev);
1970         if (r) {
1971                 DRM_ERROR("radeon: failed testing IB (%d).\n", r);
1972                 return r;
1973         }
1974
1975         return r;
1976
1977 }
1978
1979 int evergreen_suspend(struct radeon_device *rdev)
1980 {
1981 #if 0
1982         int r;
1983 #endif
1984         /* FIXME: we should wait for ring to be empty */
1985         r700_cp_stop(rdev);
1986         rdev->cp.ready = false;
1987         evergreen_irq_suspend(rdev);
1988         r600_wb_disable(rdev);
1989         evergreen_pcie_gart_disable(rdev);
1990 #if 0
1991         /* unpin shaders bo */
1992         r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
1993         if (likely(r == 0)) {
1994                 radeon_bo_unpin(rdev->r600_blit.shader_obj);
1995                 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
1996         }
1997 #endif
1998         return 0;
1999 }
2000
2001 static bool evergreen_card_posted(struct radeon_device *rdev)
2002 {
2003         u32 reg;
2004
2005         /* first check CRTCs */
2006         reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
2007                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
2008                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
2009                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
2010                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
2011                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
2012         if (reg & EVERGREEN_CRTC_MASTER_EN)
2013                 return true;
2014
2015         /* then check MEM_SIZE, in case the crtcs are off */
2016         if (RREG32(CONFIG_MEMSIZE))
2017                 return true;
2018
2019         return false;
2020 }
2021
2022 /* The plan is to move initialization into this function and use
2023  * helper functions so that radeon_device_init does pretty much
2024  * nothing more than call asic-specific functions. This should
2025  * also allow us to remove a bunch of callback functions, like
2026  * vram_info.
2027  */
2028 int evergreen_init(struct radeon_device *rdev)
2029 {
2030         int r;
2031
2032         r = radeon_dummy_page_init(rdev);
2033         if (r)
2034                 return r;
2035         /* This doesn't do much */
2036         r = radeon_gem_init(rdev);
2037         if (r)
2038                 return r;
2039         /* Read BIOS */
2040         if (!radeon_get_bios(rdev)) {
2041                 if (ASIC_IS_AVIVO(rdev))
2042                         return -EINVAL;
2043         }
2044         /* Must be an ATOMBIOS */
2045         if (!rdev->is_atom_bios) {
2046                 dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
2047                 return -EINVAL;
2048         }
2049         r = radeon_atombios_init(rdev);
2050         if (r)
2051                 return r;
2052         /* Post card if necessary */
2053         if (!evergreen_card_posted(rdev)) {
2054                 if (!rdev->bios) {
2055                         dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
2056                         return -EINVAL;
2057                 }
2058                 DRM_INFO("GPU not posted. posting now...\n");
2059                 atom_asic_init(rdev->mode_info.atom_context);
2060         }
2061         /* Initialize scratch registers */
2062         r600_scratch_init(rdev);
2063         /* Initialize surface registers */
2064         radeon_surface_init(rdev);
2065         /* Initialize clocks */
2066         radeon_get_clock_info(rdev->ddev);
2067         r = radeon_clocks_init(rdev);
2068         if (r)
2069                 return r;
2070         /* Initialize power management */
2071         radeon_pm_init(rdev);
2072         /* Fence driver */
2073         r = radeon_fence_driver_init(rdev);
2074         if (r)
2075                 return r;
2076         /* initialize AGP */
2077         if (rdev->flags & RADEON_IS_AGP) {
2078                 r = radeon_agp_init(rdev);
2079                 if (r)
2080                         radeon_agp_disable(rdev);
2081         }
2082         /* initialize memory controller */
2083         r = evergreen_mc_init(rdev);
2084         if (r)
2085                 return r;
2086         /* Memory manager */
2087         r = radeon_bo_init(rdev);
2088         if (r)
2089                 return r;
2090
2091         r = radeon_irq_kms_init(rdev);
2092         if (r)
2093                 return r;
2094
2095         rdev->cp.ring_obj = NULL;
2096         r600_ring_init(rdev, 1024 * 1024);
2097
2098         rdev->ih.ring_obj = NULL;
2099         r600_ih_ring_init(rdev, 64 * 1024);
2100
2101         r = r600_pcie_gart_init(rdev);
2102         if (r)
2103                 return r;
2104
2105         rdev->accel_working = false;
2106         r = evergreen_startup(rdev);
2107         if (r) {
2108                 dev_err(rdev->dev, "disabling GPU acceleration\n");
2109                 r700_cp_fini(rdev);
2110                 r600_wb_fini(rdev);
2111                 r600_irq_fini(rdev);
2112                 radeon_irq_kms_fini(rdev);
2113                 evergreen_pcie_gart_fini(rdev);
2114                 rdev->accel_working = false;
2115         }
2116         if (rdev->accel_working) {
2117                 r = radeon_ib_pool_init(rdev);
2118                 if (r) {
2119                         DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
2120                         rdev->accel_working = false;
2121                 }
2122                 r = r600_ib_test(rdev);
2123                 if (r) {
2124                         DRM_ERROR("radeon: failed testing IB (%d).\n", r);
2125                         rdev->accel_working = false;
2126                 }
2127         }
2128         return 0;
2129 }
2130
2131 void evergreen_fini(struct radeon_device *rdev)
2132 {
2133         radeon_pm_fini(rdev);
2134         /*r600_blit_fini(rdev);*/
2135         r700_cp_fini(rdev);
2136         r600_wb_fini(rdev);
2137         r600_irq_fini(rdev);
2138         radeon_irq_kms_fini(rdev);
2139         evergreen_pcie_gart_fini(rdev);
2140         radeon_gem_fini(rdev);
2141         radeon_fence_driver_fini(rdev);
2142         radeon_clocks_fini(rdev);
2143         radeon_agp_fini(rdev);
2144         radeon_bo_fini(rdev);
2145         radeon_atombios_fini(rdev);
2146         kfree(rdev->bios);
2147         rdev->bios = NULL;
2148         radeon_dummy_page_fini(rdev);
2149 }