| | @@ -325,11 +325,11 @@ |
| 325 | 325 | |
| 326 | 326 | /* |
| 327 | 327 | ** Return true if zTag matches one of the tags in the robot-restrict |
| 328 | 328 | ** setting. |
| 329 | 329 | */ |
| 330 | | -int robot_restrict_has_tag(const char *zTag){ |
| 330 | +static int robot_restrict_has_tag(const char *zTag){ |
| 331 | 331 | static const char *zGlob = 0; |
| 332 | 332 | if( zGlob==0 ){ |
| 333 | 333 | zGlob = db_get("robot-restrict",robot_restrict_default()); |
| 334 | 334 | if( zGlob==0 ) zGlob = ""; |
| 335 | 335 | } |
| | @@ -409,32 +409,60 @@ |
| 409 | 409 | fossil_free(zRequest); |
| 410 | 410 | return bMatch; |
| 411 | 411 | } |
| 412 | 412 | |
| 413 | 413 | /* |
| 414 | | -** Check to see if the page named in the argument is on the |
| 415 | | -** robot-restrict list. If it is on the list and if the user |
| 416 | | -** is "nobody" then bring up a captcha to test to make sure that |
| 417 | | -** client is not a robot. |
| 414 | +** Return true if all of the conditions below are true. |
| 415 | +** Return false if any of the following are false: |
| 416 | +** |
| 417 | +** * The zTag is on the robot-restrict list |
| 418 | +** |
| 419 | +** * The client that submitted the HTTP request might be |
| 420 | +** a robot |
| 421 | +** |
| 422 | +** * The Request URI does not match any of the exceptions |
| 423 | +** in the robot-exception setting. |
| 424 | +** |
| 425 | +** In other words, return true if a call to robot_restrict() would |
| 426 | +** return true and false if a call to robot_restrict() would return |
| 427 | +** false. |
| 418 | 428 | ** |
| 419 | | -** This routine returns true if a captcha was rendered and if subsequent |
| 420 | | -** page generation should be aborted. It returns false if the page |
| 421 | | -** should not be restricted and should be rendered normally. |
| 429 | +** The difference between this routine and robot_restrict() is that |
| 430 | +** this routine does not generate a proof-of-work captcha. This |
| 431 | +** routine does not change the HTTP reply in any way. It simply |
| 432 | +** returns true or false. |
| 422 | 433 | */ |
| 423 | | -int robot_restrict(const char *zTag){ |
| 434 | +int robot_would_be_restricted(const char *zTag){ |
| 424 | 435 | if( robot.resultCache==KNOWN_NOT_ROBOT ) return 0; |
| 425 | 436 | if( !robot_restrict_has_tag(zTag) ) return 0; |
| 426 | 437 | if( !client_might_be_a_robot() ) return 0; |
| 427 | 438 | if( robot_exception() ){ |
| 428 | 439 | robot.resultCache = KNOWN_NOT_ROBOT; |
| 429 | 440 | return 0; |
| 430 | 441 | } |
| 431 | | - |
| 432 | | - /* Generate the proof-of-work captcha */ |
| 433 | | - ask_for_proof_that_client_is_not_robot(); |
| 434 | 442 | return 1; |
| 435 | 443 | } |
| 444 | + |
| 445 | +/* |
| 446 | +** Check to see if the page named in the argument is on the |
| 447 | +** robot-restrict list. If it is on the list and if the user |
| 448 | +** might be a robot, then bring up a captcha to test to make |
| 449 | +** sure that client is not a robot. |
| 450 | +** |
| 451 | +** This routine returns true if a captcha was rendered and if subsequent |
| 452 | +** page generation should be aborted. It returns false if the page |
| 453 | +** should not be restricted and should be rendered normally. |
| 454 | +*/ |
| 455 | +int robot_restrict(const char *zTag){ |
| 456 | + if( robot_would_be_restricted(zTag) ){ |
| 457 | + /* Generate the proof-of-work captcha */ |
| 458 | + ask_for_proof_that_client_is_not_robot(); |
| 459 | + return 1; |
| 460 | + }else{ |
| 461 | + return 0; |
| 462 | + } |
| 463 | +} |
| 436 | 464 | |
| 437 | 465 | /* |
| 438 | 466 | ** Check to see if a robot is allowed to download a tarball, ZIP archive, |
| 439 | 467 | ** or SQL Archive for a particular check-in identified by the "rid" |
| 440 | 468 | ** argument. Return true to block the download. Return false to |
| 441 | 469 | |