/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <linux/platform_device.h>
#include "drmP.h"
#include "radeon.h"
#include "radeon_drm.h"
#include "rv770d.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);

bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;
	/* XXX */
	return connected;
}

void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd)
{
	/* XXX */
}

void evergreen_hpd_init(struct radeon_device *rdev)
{
	/* XXX */
}

void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	/* XXX */
}

void evergreen_hpd_fini(struct radeon_device *rdev)
{
	/* XXX */
}

static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* poll the MC busy bits in SRBM_STATUS */
		tmp = RREG32(SRBM_STATUS) & 0x1F00;
		if (!tmp)
			return 0;
		udelay(1);
	}
	return -1;
}

/*
 * GART
 */
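/*
 * Enable the on-chip GART: program the VM L2 cache and the MC L1 TLB
 * controls, point VM context 0 at the page table in VRAM covering the
 * GTT range, route unmapped accesses to the dummy page, then flush the
 * TLB before marking the GART ready.
 */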
int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r, i;

	if (rdev->gart.table.vram.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	for (i = 1; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

	r600_pcie_gart_tlb_flush(rdev);
	rdev->gart.ready = true;
	return 0;
}

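/*
 * Disable the GART: turn off every VM context, drop the L1 TLB and L2
 * cache enables, and unmap/unpin the page-table buffer object.
 */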
void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;
	int i, r;

	/* Disable all tables */
	for (i = 0; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	if (rdev->gart.table.vram.robj) {
		r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
		if (likely(r == 0)) {
			radeon_bo_kunmap(rdev->gart.table.vram.robj);
			radeon_bo_unpin(rdev->gart.table.vram.robj);
			radeon_bo_unreserve(rdev->gart.table.vram.robj);
		}
	}
}

void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
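/*
 * AGP path: enable the MC L1 TLBs and the VM L2 cache, but leave all VM
 * contexts disabled; with AGP the GPU goes through the AGP aperture and
 * no GART page table is programmed here.
 */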
void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	for (i = 0; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);
}

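/*
 * Save the VGA and CRTC state of all six display controllers and then
 * blank them, so the memory controller can be reprogrammed without the
 * display engine fetching from VRAM.
 */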
static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	save->vga_control[0] = RREG32(D1VGA_CONTROL);
	save->vga_control[1] = RREG32(D2VGA_CONTROL);
	save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
	save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
	save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
	save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
	save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
	save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
	save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
	save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
	save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
	save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
	save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
	save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);

	/* Stop all video */
	WREG32(VGA_RENDER_CONTROL, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

	WREG32(D1VGA_CONTROL, 0);
	WREG32(D2VGA_CONTROL, 0);
	WREG32(EVERGREEN_D3VGA_CONTROL, 0);
	WREG32(EVERGREEN_D4VGA_CONTROL, 0);
	WREG32(EVERGREEN_D5VGA_CONTROL, 0);
	WREG32(EVERGREEN_D6VGA_CONTROL, 0);
}

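/*
 * Point every CRTC and the VGA engine at the (possibly relocated) start
 * of VRAM, then restore the VGA/CRTC state saved by evergreen_mc_stop().
 */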
static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	/* Unlock host access */
	WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
	mdelay(1);
	/* Restore video state */
	WREG32(D1VGA_CONTROL, save->vga_control[0]);
	WREG32(D2VGA_CONTROL, save->vga_control[1]);
	WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
	WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
	WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
	WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
}

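/*
 * Program the memory controller: initialize the HDP registers, stop the
 * displays, wait for the MC to idle, set up the system aperture and the
 * FB/AGP locations, then bring the displays back and disable the VGA
 * renderer so the driver owns VRAM.
 */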
static void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	/* Lockout access through VGA aperture */
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
	WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it from overwriting our objects */
	rv515_vga_render_disable(rdev);
}

#if 0
/*
 * CP.
 */
static void evergreen_cp_stop(struct radeon_device *rdev)
{
	/* XXX */
}

static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	/* XXX */

	return 0;
}

/*
 * Core functions
 */
static u32 evergreen_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
						  u32 num_backends,
						  u32 backend_disable_mask)
{
	u32 backend_map = 0;

	return backend_map;
}
#endif

static void evergreen_gpu_init(struct radeon_device *rdev)
{
	/* XXX */
}

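/*
 * Determine the VRAM configuration (bus width from channel size and
 * channel count, the PCI aperture, and the total size from
 * CONFIG_MEMSIZE) and lay out the VRAM and GTT ranges in the GPU
 * address space.
 */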
int evergreen_mc_init(struct radeon_device *rdev)
{
	fixed20_12 a;
	u32 tmp;
	int chansize, numchan;
	int r;

	/* Get VRAM information */
	rdev->mc.vram_is_ddr = true;
	tmp = RREG32(MC_ARB_RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aperture size report 0? */
	rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
	rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
	/* Setup GPU memory space */
	/* size in MB on evergreen */
	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;

	if (rdev->mc.mc_vram_size > rdev->mc.aper_size)
		rdev->mc.mc_vram_size = rdev->mc.aper_size;

	if (rdev->mc.real_vram_size > rdev->mc.aper_size)
		rdev->mc.real_vram_size = rdev->mc.aper_size;

	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			return r;
		/* gtt_size is set up by radeon_agp_init */
		rdev->mc.gtt_location = rdev->mc.agp_base;
		tmp = 0xFFFFFFFFUL - rdev->mc.agp_base - rdev->mc.gtt_size;
		/* Try to put VRAM before or after the AGP aperture because
		 * we want SYSTEM_APERTURE to cover both VRAM and AGP so
		 * that the GPU can catch out of VRAM/AGP accesses
		 */
		if (rdev->mc.gtt_location > rdev->mc.mc_vram_size) {
			/* Enough room before */
			rdev->mc.vram_location = rdev->mc.gtt_location -
				rdev->mc.mc_vram_size;
		} else if (tmp > rdev->mc.mc_vram_size) {
			/* Enough room after */
			rdev->mc.vram_location = rdev->mc.gtt_location +
				rdev->mc.gtt_size;
		} else {
			/* Try to set up VRAM then AGP; this might not
			 * work on some cards
			 */
			rdev->mc.vram_location = 0x00000000UL;
			rdev->mc.gtt_location = rdev->mc.mc_vram_size;
		}
	} else {
		rdev->mc.vram_location = 0x00000000UL;
		rdev->mc.gtt_location = rdev->mc.mc_vram_size;
		rdev->mc.gtt_size = radeon_gart_size * 1024 * 1024;
	}
	rdev->mc.vram_start = rdev->mc.vram_location;
	rdev->mc.vram_end = rdev->mc.vram_location + rdev->mc.mc_vram_size - 1;
	rdev->mc.gtt_start = rdev->mc.gtt_location;
	rdev->mc.gtt_end = rdev->mc.gtt_location + rdev->mc.gtt_size - 1;
	/* FIXME: we should enforce the default clock in case the GPU is not
	 * in its default setup
	 */
	a.full = rfixed_const(100);
	rdev->pm.sclk.full = rfixed_const(rdev->clock.default_sclk);
	rdev->pm.sclk.full = rfixed_div(rdev->pm.sclk, a);
	return 0;
}

int evergreen_gpu_reset(struct radeon_device *rdev)
{
	/* FIXME: implement for evergreen */
	return 0;
}

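/*
 * Bring the asic up. Most of the acceleration path (microcode, GART,
 * blitter, interrupts, CP, writeback) is still compiled out with #if 0
 * or stubbed while evergreen support is being brought up; only the
 * memory controller and the gpu_init stub are touched for now.
 */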
static int evergreen_startup(struct radeon_device *rdev)
{
#if 0
	int r;

	if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
		r = r600_init_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load firmware!\n");
			return r;
		}
	}
#endif
	evergreen_mc_program(rdev);
#if 0
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
#endif
	evergreen_gpu_init(rdev);
#if 0
	if (!rdev->r600_blit.shader_obj) {
		r = r600_blit_init(rdev);
		if (r) {
			DRM_ERROR("radeon: failed blitter (%d).\n", r);
			return r;
		}
	}

	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
			  &rdev->r600_blit.shader_gpu_addr);
	radeon_bo_unreserve(rdev->r600_blit.shader_obj);
	if (r) {
		DRM_ERROR("failed to pin blit object %d\n", r);
		return r;
	}

	/* Enable IRQ */
	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	r600_irq_set(rdev);

	r = radeon_ring_init(rdev, rdev->cp.ring_size);
	if (r)
		return r;
	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = r600_cp_resume(rdev);
	if (r)
		return r;
	/* the write back buffer is not vital, so don't worry about failure */
	r600_wb_enable(rdev);
#endif
	return 0;
}

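/*
 * Resume assumes nothing survived suspend: re-POST the card through the
 * atombios tables, reinitialize the clocks, and rerun the startup path.
 */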
int evergreen_resume(struct radeon_device *rdev)
{
	int r;

	/* Do not reset the GPU before posting; on evergreen hw, unlike on
	 * r500 hw, posting performs the tasks needed to bring the GPU back
	 * into good shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);
	/* Initialize clocks */
	r = radeon_clocks_init(rdev);
	if (r) {
		return r;
	}

	r = evergreen_startup(rdev);
	if (r) {
		DRM_ERROR("evergreen startup failed on resume\n");
		return r;
	}
#if 0
	r = r600_ib_test(rdev);
	if (r) {
		DRM_ERROR("radeon: failed testing IB (%d).\n", r);
		return r;
	}
#endif
	return r;
}

int evergreen_suspend(struct radeon_device *rdev)
{
#if 0
	int r;

	/* FIXME: we should wait for the ring to be empty */
	r700_cp_stop(rdev);
	rdev->cp.ready = false;
	r600_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);
	/* unpin the shader bo */
	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
	if (likely(r == 0)) {
		radeon_bo_unpin(rdev->r600_blit.shader_obj);
		radeon_bo_unreserve(rdev->r600_blit.shader_obj);
	}
#endif
	return 0;
}

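/*
 * The card counts as POSTed if any CRTC master enable is set or if the
 * VBIOS has programmed CONFIG_MEMSIZE.
 */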
static bool evergreen_card_posted(struct radeon_device *rdev)
{
	u32 reg;

	/* first check CRTCs */
	reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
	if (reg & EVERGREEN_CRTC_MASTER_EN)
		return true;

	/* then check MEM_SIZE, in case the crtcs are off */
	if (RREG32(CONFIG_MEMSIZE))
		return true;

	return false;
}

/* The plan is to move initialization into this function and to use
 * helper functions so that radeon_device_init does little more than
 * call asic-specific functions. This should also allow us to remove a
 * bunch of callback functions like vram_info.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	r = radeon_dummy_page_init(rdev);
	if (r)
		return r;
	/* This doesn't do much */
	r = radeon_gem_init(rdev);
	if (r)
		return r;
	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* Post card if necessary */
	if (!evergreen_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. Posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	r = radeon_clocks_init(rdev);
	if (r)
		return r;
	/* Initialize power management */
	radeon_pm_init(rdev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;
#if 0
	r = radeon_irq_kms_init(rdev);
	if (r)
		return r;

	rdev->cp.ring_obj = NULL;
	r600_ring_init(rdev, 1024 * 1024);

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;
#endif
	rdev->accel_working = false;
	r = evergreen_startup(rdev);
	if (r) {
		evergreen_suspend(rdev);
		/*r600_wb_fini(rdev);*/
		/*radeon_ring_fini(rdev);*/
		/*evergreen_pcie_gart_fini(rdev);*/
		rdev->accel_working = false;
	}
	if (rdev->accel_working) {
		r = radeon_ib_pool_init(rdev);
		if (r) {
			DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
			rdev->accel_working = false;
		}
		r = r600_ib_test(rdev);
		if (r) {
			DRM_ERROR("radeon: failed testing IB (%d).\n", r);
			rdev->accel_working = false;
		}
	}
	return 0;
}

void evergreen_fini(struct radeon_device *rdev)
{
	evergreen_suspend(rdev);
#if 0
	r600_blit_fini(rdev);
	r600_irq_fini(rdev);
	radeon_irq_kms_fini(rdev);
	radeon_ring_fini(rdev);
	r600_wb_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
#endif
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_clocks_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
	radeon_dummy_page_fini(rdev);
}