// SPDX-License-Identifier: BSD-3-Clause OR GPL-2.0
/******************************************************************************
 *
 * Module Name: psparse - Parser top level AML parse routines
 *
 * Copyright (C) 2000 - 2018, Intel Corp.
 *
 *****************************************************************************/

/*
 * Parse the AML and build an operation tree as most interpreters,
 * like Perl, do. Parsing is done by hand rather than with a YACC
 * generated parser to tightly constrain stack and dynamic memory
 * usage. At the same time, parsing is kept flexible and the code
 * fairly compact by parsing based on a list of AML opcode
 * templates in aml_op_info[].
 */
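
/*
 * Each entry in the opcode template table (looked up via
 * acpi_ps_get_opcode_info()) describes one AML opcode: roughly, its
 * argument-decoding template, runtime argument count, flags, the ACPI
 * object type it produces, and the class/type used by the parser and
 * dispatcher. (This field summary is a sketch from the ACPICA sources
 * and may differ slightly between versions.)
 */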

#include <acpi/acpi.h>
#include "accommon.h"
#include "acparser.h"
#include "acdispat.h"
#include "amlcode.h"
#include "acinterp.h"
#include "acnamesp.h"

#define _COMPONENT          ACPI_PARSER
ACPI_MODULE_NAME("psparse")

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_get_opcode_size
 *
 * PARAMETERS:  opcode              - An AML opcode
 *
 * RETURN:      Size of the opcode, in bytes (1 or 2)
 *
 * DESCRIPTION: Get the size of the current opcode.
 *
 ******************************************************************************/

u32 acpi_ps_get_opcode_size(u32 opcode)
{

	/* Extended (2-byte) opcode if > 255 */
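	/*
	 * (For instance, ScopeOp is the single byte 0x10, while
	 * OperationRegionOp arrives as the two-byte sequence 0x5B 0x80
	 * and is handled internally as the 16-bit value 0x5B80.)
	 */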

	if (opcode > 0x00FF) {
		return (2);
	}

	/* Otherwise, just a single byte opcode */

	return (1);
}

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_peek_opcode
 *
 * PARAMETERS:  parser_state        - A parser state object
 *
 * RETURN:      Next AML opcode
 *
 * DESCRIPTION: Get next AML opcode (without incrementing AML pointer)
 *
 ******************************************************************************/

u16 acpi_ps_peek_opcode(struct acpi_parse_state *parser_state)
{
	u8 *aml;
	u16 opcode;

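	/*
	 * Work on a local copy of the AML pointer so the parser stream
	 * itself is not advanced; callers use this to look ahead before
	 * deciding how to consume the opcode.
	 */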
	aml = parser_state->aml;
	opcode = (u16) ACPI_GET8(aml);

	if (opcode == AML_EXTENDED_PREFIX) {

		/* Extended opcode, get the second opcode byte */

		aml++;
		opcode = (u16) ((opcode << 8) | ACPI_GET8(aml));
	}

	return (opcode);
}

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_complete_this_op
 *
 * PARAMETERS:  walk_state          - Current State
 *              op                  - Op to complete
 *
 * RETURN:      Status
 *
 * DESCRIPTION: Perform any cleanup at the completion of an Op.
 *
 ******************************************************************************/

acpi_status
acpi_ps_complete_this_op(struct acpi_walk_state *walk_state,
			 union acpi_parse_object *op)
{
	union acpi_parse_object *prev;
	union acpi_parse_object *next;
	const struct acpi_opcode_info *parent_info;
	union acpi_parse_object *replacement_op = NULL;
	acpi_status status = AE_OK;

	ACPI_FUNCTION_TRACE_PTR(ps_complete_this_op, op);

	/* Check for null Op, can happen if AML code is corrupt */

	if (!op) {
		return_ACPI_STATUS(AE_OK);	/* OK for now */
	}

	acpi_ex_stop_trace_opcode(op, walk_state);

	/* Delete this op and the subtree below it if asked to */

	if (((walk_state->parse_flags & ACPI_PARSE_TREE_MASK) !=
	     ACPI_PARSE_DELETE_TREE) ||
	    (walk_state->op_info->class == AML_CLASS_ARGUMENT)) {
		return_ACPI_STATUS(AE_OK);
	}

	/* Make sure that we only delete this subtree */

	if (op->common.parent) {
		prev = op->common.parent->common.value.arg;
		if (!prev) {

			/* Nothing more to do */

			goto cleanup;
		}

		/*
		 * Check if we need to replace the operator and its subtree
		 * with a return value op (placeholder op)
		 */
		parent_info =
		    acpi_ps_get_opcode_info(op->common.parent->common.aml_opcode);

		switch (parent_info->class) {
		case AML_CLASS_CONTROL:

			break;

		case AML_CLASS_CREATE:
			/*
			 * These opcodes contain term_arg operands. The current
			 * op must be replaced by a placeholder return op
			 */
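			/*
			 * (For example, a create_field-style operator still
			 * consumes the results of its buffer and index
			 * term_args at run time, so the freed subtree must
			 * leave an AML_INT_RETURN_VALUE_OP placeholder in
			 * its argument slot.)
			 */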
			replacement_op =
			    acpi_ps_alloc_op(AML_INT_RETURN_VALUE_OP,
					     op->common.aml);
			if (!replacement_op) {
				status = AE_NO_MEMORY;
			}
			break;

		case AML_CLASS_NAMED_OBJECT:
			/*
			 * These opcodes contain term_arg operands. The current
			 * op must be replaced by a placeholder return op
			 */
			if ((op->common.parent->common.aml_opcode == AML_REGION_OP) ||
			    (op->common.parent->common.aml_opcode == AML_DATA_REGION_OP) ||
			    (op->common.parent->common.aml_opcode == AML_BUFFER_OP) ||
			    (op->common.parent->common.aml_opcode == AML_PACKAGE_OP) ||
			    (op->common.parent->common.aml_opcode == AML_BANK_FIELD_OP) ||
			    (op->common.parent->common.aml_opcode == AML_VARIABLE_PACKAGE_OP)) {
				replacement_op =
				    acpi_ps_alloc_op(AML_INT_RETURN_VALUE_OP,
						     op->common.aml);
				if (!replacement_op) {
					status = AE_NO_MEMORY;
				}
			} else if ((op->common.parent->common.aml_opcode == AML_NAME_OP) &&
				   (walk_state->pass_number <= ACPI_IMODE_LOAD_PASS2)) {
				if ((op->common.aml_opcode == AML_BUFFER_OP) ||
				    (op->common.aml_opcode == AML_PACKAGE_OP) ||
				    (op->common.aml_opcode == AML_VARIABLE_PACKAGE_OP)) {
					replacement_op =
					    acpi_ps_alloc_op(op->common.aml_opcode,
							     op->common.aml);
					if (!replacement_op) {
						status = AE_NO_MEMORY;
					} else {
						replacement_op->named.data =
						    op->named.data;
						replacement_op->named.length =
						    op->named.length;
					}
				}
			}
			break;

		default:

			replacement_op =
			    acpi_ps_alloc_op(AML_INT_RETURN_VALUE_OP,
					     op->common.aml);
			if (!replacement_op) {
				status = AE_NO_MEMORY;
			}
		}

		/* We must unlink this op from the parent tree */

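		/*
		 * The parent's arguments form a singly linked list rooted at
		 * parent->common.value.arg and chained through common.next;
		 * either splice the replacement into Op's slot or simply skip
		 * over Op.
		 */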
		if (prev == op) {

			/* This op is the first in the list */

			if (replacement_op) {
				replacement_op->common.parent = op->common.parent;
				replacement_op->common.value.arg = NULL;
				replacement_op->common.node = op->common.node;
				op->common.parent->common.value.arg = replacement_op;
				replacement_op->common.next = op->common.next;
			} else {
				op->common.parent->common.value.arg = op->common.next;
			}
		}

		/* Search the parent list */

		else
			while (prev) {

				/* Traverse all siblings in the parent's argument list */

				next = prev->common.next;
				if (next == op) {
					if (replacement_op) {
						replacement_op->common.parent =
						    op->common.parent;
						replacement_op->common.value.arg = NULL;
						replacement_op->common.node =
						    op->common.node;
						prev->common.next = replacement_op;
						replacement_op->common.next =
						    op->common.next;
						next = NULL;
					} else {
						prev->common.next = op->common.next;
						next = NULL;
					}
				}
				prev = next;
			}
	}

cleanup:

	/* Now we can actually delete the subtree rooted at Op */

	acpi_ps_delete_parse_tree(op);
	return_ACPI_STATUS(status);
}

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_next_parse_state
 *
 * PARAMETERS:  walk_state          - Current state
 *              op                  - Current parse op
 *              callback_status     - Status from previous operation
 *
 * RETURN:      Status
 *
 * DESCRIPTION: Update the parser state based upon the return exception from
 *              the parser callback.
 *
 ******************************************************************************/

acpi_status
acpi_ps_next_parse_state(struct acpi_walk_state *walk_state,
			 union acpi_parse_object *op,
			 acpi_status callback_status)
{
	struct acpi_parse_state *parser_state = &walk_state->parser_state;
	acpi_status status = AE_CTRL_PENDING;

	ACPI_FUNCTION_TRACE_PTR(ps_next_parse_state, op);

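	/*
	 * The dispatcher callbacks report flow-control events via the
	 * AE_CTRL_* exception codes; each case below maps one of them onto
	 * a parser action (repositioning the AML pointer and/or passing a
	 * control status back to the parse loop).
	 */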
	switch (callback_status) {
	case AE_CTRL_TERMINATE:
		/*
		 * A control method was terminated via a RETURN statement.
		 * The walk of this method is complete.
		 */
		parser_state->aml = parser_state->aml_end;
		status = AE_CTRL_TERMINATE;
		break;

	case AE_CTRL_BREAK:

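		/*
		 * Rewind to the innermost enclosing While and force its
		 * predicate FALSE so that re-evaluation terminates the loop.
		 */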
		parser_state->aml = walk_state->aml_last_while;
		walk_state->control_state->common.value = FALSE;
		status = AE_CTRL_BREAK;
		break;

	case AE_CTRL_CONTINUE:

		parser_state->aml = walk_state->aml_last_while;
		status = AE_CTRL_CONTINUE;
		break;

	case AE_CTRL_PENDING:

		parser_state->aml = walk_state->aml_last_while;
		break;

#if 0
	case AE_CTRL_SKIP:

		parser_state->aml = parser_state->scope->parse_scope.pkg_end;
		status = AE_OK;
		break;
#endif

	case AE_CTRL_TRUE:
		/*
		 * Predicate of an IF was true, and we are at the matching ELSE.
		 * Just close out this package
		 */
		parser_state->aml = acpi_ps_get_next_package_end(parser_state);
		status = AE_CTRL_PENDING;
		break;

	case AE_CTRL_FALSE:
		/*
		 * Either an IF/WHILE Predicate was false or we encountered a BREAK
		 * opcode. In both cases, we do not execute the rest of the
		 * package; we simply close out the parent (finishing the walk of
		 * this branch of the tree) and continue execution at the parent
		 * level.
		 */
		parser_state->aml = parser_state->scope->parse_scope.pkg_end;

		/* In the case of a BREAK, just force a predicate (if any) to FALSE */

		walk_state->control_state->common.value = FALSE;
		status = AE_CTRL_END;
		break;

	case AE_CTRL_TRANSFER:

		/* A method call (invocation) -- transfer control */

		status = AE_CTRL_TRANSFER;
		walk_state->prev_op = op;
		walk_state->method_call_op = op;
		walk_state->method_call_node = (op->common.value.arg)->common.node;

		/* Will return value (if any) be used by the caller? */

		walk_state->return_used = acpi_ds_is_result_used(op, walk_state);
		break;

	default:

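		/*
		 * Any remaining control codes are informational only; convert
		 * them to AE_OK so the walk continues. Real errors are
		 * propagated unchanged.
		 */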
		status = callback_status;
		if ((callback_status & AE_CODE_MASK) == AE_CODE_CONTROL) {
			status = AE_OK;
		}
		break;
	}

	return_ACPI_STATUS(status);
}

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_parse_aml
 *
 * PARAMETERS:  walk_state          - Current state
 *
 * RETURN:      Status
 *
 * DESCRIPTION: Parse raw AML and return a tree of ops
 *
 ******************************************************************************/

acpi_status acpi_ps_parse_aml(struct acpi_walk_state *walk_state)
{
	acpi_status status;
	struct acpi_thread_state *thread;
	struct acpi_thread_state *prev_walk_list = acpi_gbl_current_walk_list;
	struct acpi_walk_state *previous_walk_state;

	ACPI_FUNCTION_TRACE(ps_parse_aml);

	ACPI_DEBUG_PRINT((ACPI_DB_PARSE,
			  "Entered with WalkState=%p Aml=%p size=%X\n",
			  walk_state, walk_state->parser_state.aml,
			  walk_state->parser_state.aml_size));

	if (!walk_state->parser_state.aml) {
		return_ACPI_STATUS(AE_BAD_ADDRESS);
	}

	/* Create and initialize a new thread state */

	thread = acpi_ut_create_thread_state();
	if (!thread) {
		if (walk_state->method_desc) {

			/* Executing a control method - additional cleanup */

			acpi_ds_terminate_control_method(walk_state->method_desc,
							 walk_state);
		}

		acpi_ds_delete_walk_state(walk_state);
		return_ACPI_STATUS(AE_NO_MEMORY);
	}

	walk_state->thread = thread;

	/*
	 * If executing a method, the starting sync_level is this method's
	 * sync_level
	 */
	if (walk_state->method_desc) {
		walk_state->thread->current_sync_level =
		    walk_state->method_desc->method.sync_level;
	}
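
	/*
	 * (The thread's current_sync_level is later checked by the mutex
	 * Acquire path to enforce the ACPI rule that a method may only
	 * acquire mutexes of equal or higher sync level.)
	 */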

	acpi_ds_push_walk_state(walk_state, thread);

	/*
	 * This global allows the AML debugger to get a handle to the currently
	 * executing control method.
	 */
	acpi_gbl_current_walk_list = thread;

	/*
	 * Execute the walk loop as long as there is a valid Walk State. This
	 * handles nested control method invocations without recursion.
	 */
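	/*
	 * A nested method invocation surfaces below as AE_CTRL_TRANSFER: a
	 * new walk state for the callee is pushed onto the thread and becomes
	 * current. When a method completes, its walk state is popped and the
	 * preempted caller (if any) is restarted where it left off.
	 */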
	ACPI_DEBUG_PRINT((ACPI_DB_PARSE, "State=%p\n", walk_state));

	status = AE_OK;
	while (walk_state) {
		if (ACPI_SUCCESS(status)) {
			/*
			 * The parse_loop executes AML until the method terminates
			 * or calls another method.
			 */
			status = acpi_ps_parse_loop(walk_state);
		}

		ACPI_DEBUG_PRINT((ACPI_DB_PARSE,
				  "Completed one call to walk loop, %s State=%p\n",
				  acpi_format_exception(status), walk_state));

		if (status == AE_CTRL_TRANSFER) {
			/*
			 * A method call was detected.
			 * Transfer control to the called control method
			 */
			status = acpi_ds_call_control_method(thread, walk_state, NULL);
			if (ACPI_FAILURE(status)) {
				status = acpi_ds_method_error(status, walk_state);
			}

			/*
			 * If the transfer to the new method call worked, a new
			 * walk state was created -- get it
			 */
			walk_state = acpi_ds_get_current_walk_state(thread);
			continue;
		} else if (status == AE_CTRL_TERMINATE) {
			status = AE_OK;
		} else if ((status != AE_OK) && (walk_state->method_desc)) {

			/* Either the method parse or actual execution failed */

			acpi_ex_exit_interpreter();
			if (status == AE_ABORT_METHOD) {
				acpi_ns_print_node_pathname(walk_state->method_node,
							    "Method aborted:");
				acpi_os_printf("\n");
			} else {
				ACPI_ERROR_METHOD("Method parse/execution failed",
						  walk_state->method_node, NULL,
						  status);
			}
			acpi_ex_enter_interpreter();

			/* Check for possible multi-thread reentrancy problem */

			if ((status == AE_ALREADY_EXISTS) &&
			    (!(walk_state->method_desc->method.info_flags &
			       ACPI_METHOD_SERIALIZED))) {
				/*
				 * Method is not serialized and tried to create an object
				 * twice. The probable cause is that the method cannot
				 * handle reentrancy. Mark as "pending serialized" now, and
				 * then mark "serialized" when the last thread exits.
				 */
				walk_state->method_desc->method.info_flags |=
				    ACPI_METHOD_SERIALIZED_PENDING;
			}
		}

		/* We are done with this walk, move on to the parent if any */

		walk_state = acpi_ds_pop_walk_state(thread);

		/* Reset the current scope to the beginning of scope stack */

		acpi_ds_scope_stack_clear(walk_state);

		/*
		 * If we just returned from the execution of a control method or if we
		 * encountered an error during the method parse phase, there's lots of
		 * cleanup to do
		 */
		if (((walk_state->parse_flags & ACPI_PARSE_MODE_MASK) ==
		     ACPI_PARSE_EXECUTE &&
		     !(walk_state->parse_flags & ACPI_PARSE_MODULE_LEVEL)) ||
		    (ACPI_FAILURE(status))) {
			acpi_ds_terminate_control_method(walk_state->method_desc,
							 walk_state);
		}

		/* Delete this walk state and all linked control states */

		acpi_ps_cleanup_scope(&walk_state->parser_state);
		previous_walk_state = walk_state;

		ACPI_DEBUG_PRINT((ACPI_DB_PARSE,
				  "ReturnValue=%p, ImplicitValue=%p State=%p\n",
				  walk_state->return_desc,
				  walk_state->implicit_return_obj, walk_state));

		/* Check if we have restarted a preempted walk */

		walk_state = acpi_ds_get_current_walk_state(thread);
		if (walk_state) {
			if (ACPI_SUCCESS(status)) {
				/*
				 * There is another walk state, restart it.
				 * If the method return value is not used by
				 * the parent, the object is deleted.
				 */
				if (!previous_walk_state->return_desc) {
					/*
					 * In slack mode execution, if there is no return value
					 * we should implicitly return zero (0) as a default value.
					 */
					if (acpi_gbl_enable_interpreter_slack &&
					    !previous_walk_state->implicit_return_obj) {
						previous_walk_state->implicit_return_obj =
						    acpi_ut_create_integer_object((u64) 0);
						if (!previous_walk_state->implicit_return_obj) {
							return_ACPI_STATUS(AE_NO_MEMORY);
						}
					}

					/* Restart the calling control method */

					status =
					    acpi_ds_restart_control_method(walk_state,
									   previous_walk_state->implicit_return_obj);
				} else {
					/*
					 * We have a valid return value, delete any implicit
					 * return value.
					 */
					acpi_ds_clear_implicit_return(previous_walk_state);

					status =
					    acpi_ds_restart_control_method(walk_state,
									   previous_walk_state->return_desc);
				}
				if (ACPI_SUCCESS(status)) {
					walk_state->walk_type |= ACPI_WALK_METHOD_RESTART;
				}
			} else {
				/* On error, delete any return object or implicit return */

				acpi_ut_remove_reference(previous_walk_state->return_desc);
				acpi_ds_clear_implicit_return(previous_walk_state);
			}
		}

		/*
		 * Just completed a 1st-level method, save the final internal return
		 * value (if any)
		 */
		else if (previous_walk_state->caller_return_desc) {
			if (previous_walk_state->implicit_return_obj) {
				*(previous_walk_state->caller_return_desc) =
				    previous_walk_state->implicit_return_obj;
			} else {
				/* NULL if no return value */

				*(previous_walk_state->caller_return_desc) =
				    previous_walk_state->return_desc;
			}
		} else {
			if (previous_walk_state->return_desc) {

				/* Caller doesn't want it, must delete it */

				acpi_ut_remove_reference(previous_walk_state->return_desc);
			}
			if (previous_walk_state->implicit_return_obj) {

				/* Caller doesn't want it, must delete it */

				acpi_ut_remove_reference(previous_walk_state->implicit_return_obj);
			}
		}

		acpi_ds_delete_walk_state(previous_walk_state);
	}

	/* Normal exit */

	acpi_ex_release_all_mutexes(thread);
	acpi_ut_delete_generic_state(ACPI_CAST_PTR
				     (union acpi_generic_state, thread));
	acpi_gbl_current_walk_list = prev_walk_list;
	return_ACPI_STATUS(status);
}
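
/*
 * Typical call sequence (a sketch only -- simplified from the ACPICA
 * method-execution path; exact argument lists vary between versions):
 *
 *	walk_state = acpi_ds_create_walk_state(owner_id, NULL, NULL, NULL);
 *	status = acpi_ds_init_aml_walk(walk_state, op, method_node, aml_start,
 *				       aml_length, info, pass_number);
 *	if (ACPI_SUCCESS(status)) {
 *		status = acpi_ps_parse_aml(walk_state);
 *	}
 */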