/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <linux/platform_device.h>
#include "drmP.h"
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_drm.h"
#include "rv770d.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);

bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;
	/* XXX */
	return connected;
}

void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd)
{
	/* XXX */
}

void evergreen_hpd_init(struct radeon_device *rdev)
{
	/* XXX */
}

void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	/* XXX */
}

void evergreen_hpd_fini(struct radeon_device *rdev)
{
	/* XXX */
}

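/*
 * Poll the memory controller busy bits in SRBM_STATUS until the MC reports
 * idle. Returns 0 on success, -1 if the usec timeout expires first.
 */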
static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read the MC busy bits from SRBM_STATUS */
		tmp = RREG32(SRBM_STATUS) & 0x1F00;
		if (!tmp)
			return 0;
		udelay(1);
	}
	return -1;
}

/*
 * GART
 */
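/*
 * Pin the GART page table in VRAM, configure the L2 cache and L1 TLBs,
 * and enable VM context 0 over the GTT aperture; the remaining VM
 * contexts are left disabled.
 */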
int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r, i;

	if (rdev->gart.table.vram.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	for (i = 1; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

	r600_pcie_gart_tlb_flush(rdev);
	rdev->gart.ready = true;
	return 0;
}

void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;
	int i, r;

	/* Disable all tables */
	for (i = 0; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	if (rdev->gart.table.vram.robj) {
		r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
		if (likely(r == 0)) {
			radeon_bo_kunmap(rdev->gart.table.vram.robj);
			radeon_bo_unpin(rdev->gart.table.vram.robj);
			radeon_bo_unreserve(rdev->gart.table.vram.robj);
		}
	}
}

void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}

void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	for (i = 0; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);
}

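/*
 * Save the VGA and CRTC controller state, then blank all six display
 * controllers so the memory controller can be reprogrammed safely.
 */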
static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	save->vga_control[0] = RREG32(D1VGA_CONTROL);
	save->vga_control[1] = RREG32(D2VGA_CONTROL);
	save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
	save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
	save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
	save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
	save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
	save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
	save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
	save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
	save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
	save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
	save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
	save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);

	/* Stop all video */
	WREG32(VGA_RENDER_CONTROL, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

	WREG32(D1VGA_CONTROL, 0);
	WREG32(D2VGA_CONTROL, 0);
	WREG32(EVERGREEN_D3VGA_CONTROL, 0);
	WREG32(EVERGREEN_D4VGA_CONTROL, 0);
	WREG32(EVERGREEN_D5VGA_CONTROL, 0);
	WREG32(EVERGREEN_D6VGA_CONTROL, 0);
}

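/*
 * Point every CRTC and the VGA aperture at the new VRAM base, then restore
 * the display controller state saved by evergreen_mc_stop().
 */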
static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	/* Unlock host access */
	WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
	mdelay(1);
	/* Restore video state */
	WREG32(D1VGA_CONTROL, save->vga_control[0]);
	WREG32(D2VGA_CONTROL, save->vga_control[1]);
	WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
	WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
	WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
	WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
}

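/*
 * Reprogram the memory controller's view of VRAM (system aperture, FB
 * location, AGP aperture) with the displays blanked, then bring the
 * displays back up.
 */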
static void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	/* Lockout access through VGA aperture */
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
	WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}

#if 0
/*
 * CP.
 */
static void evergreen_cp_stop(struct radeon_device *rdev)
{
	/* XXX */
}

static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	/* XXX */

	return 0;
}

/*
 * Core functions
 */
static u32 evergreen_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
						  u32 num_backends,
						  u32 backend_disable_mask)
{
	u32 backend_map = 0;

	return backend_map;
}
#endif

static void evergreen_gpu_init(struct radeon_device *rdev)
{
	/* XXX */
}

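/*
 * Determine the VRAM width from the channel size and channel count, read
 * the VRAM size from CONFIG_MEMSIZE, and place the VRAM and GTT apertures
 * in the GPU address space.
 */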
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM information */
	rdev->mc.vram_is_ddr = true;
	tmp = RREG32(MC_ARB_RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
	rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
	/* Setup GPU memory space */
	/* size in MB on evergreen */
	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	/* FIXME remove this once we support unmappable VRAM */
	if (rdev->mc.mc_vram_size > rdev->mc.aper_size) {
		rdev->mc.mc_vram_size = rdev->mc.aper_size;
		rdev->mc.real_vram_size = rdev->mc.aper_size;
	}
	r600_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}

bool evergreen_gpu_is_lockup(struct radeon_device *rdev)
{
	/* FIXME: implement for evergreen */
	return false;
}

int evergreen_asic_reset(struct radeon_device *rdev)
{
	/* FIXME: implement for evergreen */
	return 0;
}

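/*
 * Bring the GPU up: program the memory controller and run the core init.
 * Microcode loading, GART/AGP enable, blitter, IRQ and CP bring-up are
 * still stubbed out under #if 0 until acceleration is supported.
 */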
static int evergreen_startup(struct radeon_device *rdev)
{
#if 0
	int r;

	if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
		r = r600_init_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load firmware!\n");
			return r;
		}
	}
#endif
	evergreen_mc_program(rdev);
#if 0
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
#endif
	evergreen_gpu_init(rdev);
#if 0
	if (!rdev->r600_blit.shader_obj) {
		r = r600_blit_init(rdev);
		if (r) {
			DRM_ERROR("radeon: failed blitter (%d).\n", r);
			return r;
		}
	}

	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
			&rdev->r600_blit.shader_gpu_addr);
	radeon_bo_unreserve(rdev->r600_blit.shader_obj);
	if (r) {
		DRM_ERROR("failed to pin blit object %d\n", r);
		return r;
	}

	/* Enable IRQ */
	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	r600_irq_set(rdev);

	r = radeon_ring_init(rdev, rdev->cp.ring_size);
	if (r)
		return r;
	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = r600_cp_resume(rdev);
	if (r)
		return r;
	/* write back buffers are not vital so don't worry about failure */
	r600_wb_enable(rdev);
#endif
	return 0;
}

int evergreen_resume(struct radeon_device *rdev)
{
	int r;

	/* Do not reset the GPU before posting; on rv770 hw, unlike r500 hw,
	 * posting will perform the necessary tasks to bring the GPU back
	 * into good shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);
	/* Initialize clocks */
	r = radeon_clocks_init(rdev);
	if (r) {
		return r;
	}

	r = evergreen_startup(rdev);
	if (r) {
		DRM_ERROR("evergreen startup failed on resume\n");
		return r;
	}
#if 0
	r = r600_ib_test(rdev);
	if (r) {
		DRM_ERROR("radeon: failed testing IB (%d).\n", r);
		return r;
	}
#endif
	return r;
}

int evergreen_suspend(struct radeon_device *rdev)
{
#if 0
	int r;

	/* FIXME: we should wait for ring to be empty */
	r700_cp_stop(rdev);
	rdev->cp.ready = false;
	r600_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);
	/* unpin shaders bo */
	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
	if (likely(r == 0)) {
		radeon_bo_unpin(rdev->r600_blit.shader_obj);
		radeon_bo_unreserve(rdev->r600_blit.shader_obj);
	}
#endif
	return 0;
}

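/*
 * Check whether the GPU has already been posted by the BIOS: any enabled
 * CRTC, or a non-zero CONFIG_MEMSIZE, means the card is posted.
 */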
static bool evergreen_card_posted(struct radeon_device *rdev)
{
	u32 reg;

	/* first check CRTCs */
	reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
	if (reg & EVERGREEN_CRTC_MASTER_EN)
		return true;

	/* then check MEM_SIZE, in case the crtcs are off */
	if (RREG32(CONFIG_MEMSIZE))
		return true;

	return false;
}

/* Plan is to move initialization into that function and use
 * helper functions so that radeon_device_init does pretty much
 * nothing more than call asic specific functions. This should
 * also allow us to remove a bunch of callback functions
 * like vram_info.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	r = radeon_dummy_page_init(rdev);
	if (r)
		return r;
	/* This doesn't do much */
	r = radeon_gem_init(rdev);
	if (r)
		return r;
	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* Post card if necessary */
	if (!evergreen_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	r = radeon_clocks_init(rdev);
	if (r)
		return r;
	/* Initialize power management */
	radeon_pm_init(rdev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;
#if 0
	r = radeon_irq_kms_init(rdev);
	if (r)
		return r;

	rdev->cp.ring_obj = NULL;
	r600_ring_init(rdev, 1024 * 1024);

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;
#endif
	rdev->accel_working = false;
	r = evergreen_startup(rdev);
	if (r) {
		evergreen_suspend(rdev);
		/*r600_wb_fini(rdev);*/
		/*radeon_ring_fini(rdev);*/
		/*evergreen_pcie_gart_fini(rdev);*/
		rdev->accel_working = false;
	}
	if (rdev->accel_working) {
		r = radeon_ib_pool_init(rdev);
		if (r) {
			DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
			rdev->accel_working = false;
		}
		r = r600_ib_test(rdev);
		if (r) {
			DRM_ERROR("radeon: failed testing IB (%d).\n", r);
			rdev->accel_working = false;
		}
	}
	return 0;
}

void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	evergreen_suspend(rdev);
#if 0
	r600_blit_fini(rdev);
	r600_irq_fini(rdev);
	radeon_irq_kms_fini(rdev);
	radeon_ring_fini(rdev);
	r600_wb_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
#endif
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_clocks_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
	radeon_dummy_page_fini(rdev);
}