whitespace pedantry

Brennen Bearnes committed 11 months ago · commit f941db1713

2 changed files with 112 additions and 94 deletions:

  src/dependencies.php   +9   -9
  src/routes.php         +103 -85

src/dependencies.php  (+9, -9)

@@ -4,16 +4,16 @@
 $container = $app->getContainer();
 
 // view renderer
-$container['renderer'] = function ($c) {
-  $settings = $c->get('settings')['renderer'];
-  return new Slim\Views\PhpRenderer($settings['template_path']);
+$container['renderer'] = function ( $c ) {
+	$settings = $c->get( 'settings' )['renderer'];
+	return new Slim\Views\PhpRenderer( $settings['template_path'] );
 };
 
 // monolog
-$container['logger'] = function ($c) {
-  $settings = $c->get('settings')['logger'];
-  $logger = new Monolog\Logger($settings['name']);
-  $logger->pushProcessor(new Monolog\Processor\UidProcessor());
-  $logger->pushHandler(new Monolog\Handler\StreamHandler($settings['path'], $settings['level']));
-  return $logger;
+$container['logger'] = function ( $c ) {
+	$settings = $c->get( 'settings' )['logger'];
+	$logger = new Monolog\Logger( $settings['name'] );
+	$logger->pushProcessor( new Monolog\Processor\UidProcessor() );
+	$logger->pushHandler( new Monolog\Handler\StreamHandler( $settings['path'], $settings['level'] ) );
+	return $logger;
 };
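
For context (not part of this commit): a minimal sketch of how the 'renderer' and 'logger' entries registered above are typically consumed from a Slim 3 route closure, where $this resolves against the container. The '/hello/{name}' route and 'hello.phtml' template are hypothetical, used only for illustration.

// Hypothetical route fragment, as it might appear in src/routes.php.
// Slim 3 binds route closures to the container, so $this->logger and
// $this->renderer resolve the entries defined in src/dependencies.php.
$app->get( '/hello/{name}', function ( $request, $response, $args ) {
	// Monolog logger configured above (UidProcessor + StreamHandler).
	$this->logger->info( 'Rendering hello page for ' . $args['name'] );

	// PhpRenderer configured above, using settings['renderer']['template_path'].
	return $this->renderer->render( $response, 'hello.phtml', $args );
} );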

src/routes.php  (+103, -85)

@@ -8,8 +8,8 @@ use Slim\Http\Response;
  * Home page with basic template and category input form.
  */
 $app->get( '/', function (Request $request, Response $response, array $args) {
-  // Render index view
-  return $this->renderer->render( $response, 'index.phtml', $args );
+	// Render index view
+	return $this->renderer->render( $response, 'index.phtml', $args );
 } );
 
 /**
@@ -17,74 +17,91 @@ $app->get( '/', function (Request $request, Response $response, array $args) {
  * MediaWiki server.
  */
 $app->get( '/category', function (Request $request, Response $response, array $args) {
-  $category = $request->getQueryParam('cat');
-
-  $queryParams = [
-    'action'      => 'query',
-    'format'      => 'json',
-    'generator'   => 'categorymembers',
-    'gcmtitle'    => "Category:$category",
-    'gcmlimit'    => '60',
-    'prop'        => 'extracts',
-    'exintro'     => '1',
-    'explaintext' => '1',
-  ];
-
-  $textStatistics = new TS\TextStatistics;
-  $pageList = [];
-  $error = null;
-  $endpoint = $this->get( 'settings' )['mwEndpoint'];
-
-  while ( true ) {
-    // Since we can only retrieve 20 extracts at a time, we'll need to continue
-    // through the full list:
-    if ( isset( $excontinue ) ) {
-      $queryParams['excontinue'] = $excontinue;
-    }
-
-    list( $categoryData, $apiStatus ) = apiRequest( $endpoint, $queryParams );
-
-    if ( $apiStatus !== 200 ) {
-      $error = 'MediaWiki API request failed.';
-      break;
-    }
-
-    if ( !isset( $categoryData['query']['pages'] ) ) {
-      $error = 'No pages found for category.';
-      break;
-    }
-
-    // Assign readability scores to each extract and build a list:
-    foreach ( $categoryData['query']['pages'] as $page ) {
-
-      // Skip if no intro or no extract for page in this part of the resultset:
-      if ( !isset( $page['extract'] ) || !strlen( $page['extract'] ) ) {
-        continue;
-      }
-
-      $flesch = $textStatistics->daleChallReadabilityScore( $page['extract'] );
-      $pageList[] = [ $page['title'], $page['extract'], $flesch ];
-
-    }
-
-    $excontinue = null;
-    if ( isset( $categoryData['continue']['excontinue']) ) {
-      $excontinue = $categoryData['continue']['excontinue'];
-    } else {
-      // No more extract results.
-      break;
-    }
-  }
-
-  // Initial sort by readability score:
-  usort( $pageList, function ($pageA, $pageB) {
-    return $pageA[2] <=> $pageB[2];
-  } );
-
-  return $response->withJson( [
-    'pagelist' => $pageList,
-    'error'    => $error
-  ] );
+	$category = $request->getQueryParam('cat');
+	$type = $request->getQueryParam('type');
+
+	$queryParams = [
+		'action' => 'query',
+		'format' => 'json',
+		'generator' => 'categorymembers',
+		'gcmtitle' => "Category:$category",
+		'gcmlimit' => '60',
+		'prop' => 'extracts',
+		'exintro' => '1',
+		'explaintext' => '1',
+	];
+
+	$textStatistics = new TS\TextStatistics;
+	$pageList = [];
+	$error = null;
+	$endpoint = $this->get( 'settings' )['mwEndpoint'];
+
+	while ( true ) {
+		// Since we can only retrieve 20 extracts at a time, we'll need to continue
+		// through the full list:
+		if ( isset( $excontinue ) ) {
+			$queryParams['excontinue'] = $excontinue;
+		}
+
+		list( $categoryData, $apiStatus ) = apiRequest( $endpoint, $queryParams );
+
+		if ( $apiStatus !== 200 ) {
+			$error = 'MediaWiki API request failed.';
+			break;
+		}
+
+		if ( !isset( $categoryData['query']['pages'] ) ) {
+			$error = 'No pages found for category.';
+			break;
+		}
+
+		// Assign readability scores to each extract and build a list:
+		foreach ( $categoryData['query']['pages'] as $page ) {
+
+			// Skip if no intro or no extract for page in this part of the resultset:
+			if ( !isset( $page['extract'] ) || !strlen( $page['extract'] ) ) {
+				continue;
+			}
+
+			$score = $textStatistics->daleChallReadabilityScore( $page['extract'] );
+			$pageList[] = [ $page['title'], $page['extract'], $score ];
+
+		}
+
+		$excontinue = null;
+		if ( isset( $categoryData['continue']['excontinue']) ) {
+			$excontinue = $categoryData['continue']['excontinue'];
+		} else {
+			// No more extract results.
+			break;
+		}
+	}
+
+	// Initial sort by readability score:
+	usort( $pageList, function ($pageA, $pageB) {
+		return $pageA[2] <=> $pageB[2];
+	} );
+
+	// Either send back raw JSON or return the home page with data
+	// the template can use to build a table:
+
+	if ( $type === 'json' ) {
+		return $response->withJson( [
+			'pageList' => $pageList,
+			'error' => $error
+		] );
+	} else {
+		$templateVars = [
+			'pageList' => $pageList,
+			'error' => $error,
+			'cat' => $category
+		];
+		return $this->renderer->render(
+			$response,
+			'index.phtml',
+			$templateVars
+		);
+	}
 } );
 
 /**
@@ -95,19 +112,20 @@ $app->get( '/category', function (Request $request, Response $response, array $a
  * @param array $queryParams collection of GET parameters
  * @return array containing json_decode()ed API response and HTTP status code
  */
-function apiRequest( $endpoint, $queryParams ) {
-  $queryString = http_build_query( $queryParams );
-
-  $ch = curl_init( $endpoint . $queryString );
-
-  curl_setopt( $ch, \CURLOPT_RETURNTRANSFER, 1 );
-
-  $apiResponse = curl_exec( $ch );
-  $apiStatus = curl_getinfo( $ch, CURLINFO_HTTP_CODE );
-
-  curl_close( $ch );
-
-  $categoryData = json_decode( $apiResponse, true );
-
-  return [ $categoryData, $apiStatus ];
+function apiRequest ( $endpoint, $queryParams ) {
+	$queryString = http_build_query( $queryParams );
+
+	$ch = curl_init( $endpoint . $queryString );
+
+	curl_setopt( $ch, \CURLOPT_RETURNTRANSFER, 1 );
+	curl_setopt( $ch, CURLOPT_USERAGENT, "MediaWikiCategoryReadability/0.0.1 (mediawiki@chaff.p1k3.com)" );
+
+	$apiResponse = curl_exec( $ch );
+	$apiStatus = curl_getinfo( $ch, CURLINFO_HTTP_CODE );
+
+	curl_close( $ch );
+
+	$categoryData = json_decode( $apiResponse, true );
+
+	return [ $categoryData, $apiStatus ];
 }
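
For reference (not part of this commit): the new 'type' query parameter switches /category between rendering index.phtml and returning a raw JSON payload. Below is a minimal client sketch under assumptions — the app listening at http://localhost:8080 (hypothetical base URL) and 'Physics' as an example category — using the same curl calls as apiRequest():

// Illustrative client call against the updated /category route.
$query = http_build_query( [ 'cat' => 'Physics', 'type' => 'json' ] );

$ch = curl_init( 'http://localhost:8080/category?' . $query );
curl_setopt( $ch, CURLOPT_RETURNTRANSFER, 1 );
$body = curl_exec( $ch );
curl_close( $ch );

// With type=json the route responds with { "pageList": [...], "error": ... },
// where each pageList entry is [ title, extract, readability score ].
$data = json_decode( $body, true );
foreach ( $data['pageList'] as list( $title, $extract, $score ) ) {
	echo "$score\t$title\n";
}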
