OCHA-DAP / hdx-ckan · build #5649 · Pull Request #6412
26 Aug 2024 11:34AM UTC · coverage: 72.882% (-0.1%) from 72.998%
Pull Request #6412: HDX-9960 & HDX-9987 contact the contributor & HDX Connect new pages
Commit: Merge branch 'dev' into feature/HDX-9990-implement-new-contact-contributor-form (committer: web-flow, reported via coveralls-python)

146 of 321 new or added lines in 10 files covered (45.48%).
1 existing line in 1 file is now uncovered.
11750 of 16122 relevant lines covered (72.88%).
0.73 hits per line.

Source File

65.62% of lines covered · /ckanext-hdx_package/ckanext/hdx_package/views/dataset.py
import csv
import io
import json
import logging
import re

from flask import Blueprint, make_response
from flask.views import MethodView
from typing import Any, Optional, Union
from six import text_type

from ckan.types import Response
from ckan.lib.mailer import MailerException

import ckan.authz as authz
import ckan.lib.captcha as captcha
import ckan.lib.navl.dictization_functions as dictization_functions
import ckan.model as model
import ckan.plugins.toolkit as tk

import ckanext.hdx_package.helpers.analytics as analytics
import ckanext.hdx_package.helpers.custom_pages as cp_h
import ckanext.hdx_package.helpers.custom_validator as vd
import ckanext.hdx_package.helpers.membership_data as membership_data
import ckanext.hdx_search.helpers.search_history as search_history
import ckanext.hdx_package.controller_logic.dataset_view_logic as dataset_view_logic
from ckanext.hdx_package.controller_logic.dataset_contact_contributor import DatasetContactContributorLogic
from ckanext.hdx_package.controller_logic.dataset_request_access import DatasetRequestAccessLogic

from ckan.views.dataset import _setup_template_variables

from ckanext.hdx_package.helpers import resource_grouping
from ckanext.hdx_package.helpers.constants import PACKAGE_METADATA_FIELDS_MAP, RESOURCE_METADATA_FIELDS_MAP
from ckanext.hdx_package.helpers.membership_data import contributor_topics
from ckanext.hdx_package.helpers.helpers import filesize_format
from ckanext.hdx_package.helpers.util import find_approx_download
from ckanext.hdx_package.views.light_dataset import generic_search
from ckanext.hdx_theme.helpers.helpers import markdown_extract_strip
from ckanext.hdx_theme.util.jql import fetch_downloads_per_week_for_dataset
from ckanext.hdx_theme.util.light_redirect import check_redirect_needed

from ckanext.hdx_org_group.views.organization_join import set_custom_rect_logo_url

import ckanext.hdx_users.helpers.helpers as usr_h

log = logging.getLogger(__name__)

config = tk.config
get_action = tk.get_action
check_access = tk.check_access
request = tk.request
render = tk.render
abort = tk.abort
redirect = tk.redirect_to
_ = tk._
h = tk.h
g = tk.g

NotAuthorized = tk.NotAuthorized
NotFound = tk.ObjectNotFound
ValidationError = tk.ValidationError

hdx_dataset = Blueprint(u'hdx_dataset', __name__, url_prefix=u'/dataset')
hdx_search = Blueprint(u'hdx_search', __name__, url_prefix=u'/search')


@check_redirect_needed
def search():
    query_string = request.args.get('q', u'')
    if g.userobj and query_string:
        search_history.store_search(query_string, g.userobj.id)
    return generic_search(u'search/search.html')

@check_redirect_needed
def read(id):
    """
    Display the package, includes HDX additions for continuous browsing
    """

    context = {'model': model, 'session': model.Session,
               'user': g.user, 'for_view': True,
               'auth_user_obj': g.userobj}
    data_dict = {'id': id, 'include_tracking': True}

    # check if package exists
    try:
        pkg_dict = get_action('package_show')(context, data_dict)
        pkg = context['package']

        showcase_list = []
        # Needed because of showcase validation convert_package_name_or_id_to_id_for_type_dataset()
        current_pkg_type = pkg_dict.get('type')

        if current_pkg_type == 'dataset':
            context_showcase = {'model': model, 'session': model.Session,
                       'user': g.user, 'for_view': True,
                       'auth_user_obj': g.userobj}
            _showcase_list = get_action('ckanext_package_showcase_list')(context_showcase,
                                                                         {'package_id': pkg_dict['id']})
            if _showcase_list:
                showcase_list = sorted(_showcase_list, key=lambda i: i.get('metadata_modified'), reverse=True)
            pkg_dict['showcase_count'] = len(_showcase_list)
        else:
            return abort(404, _('Package type is not dataset'))
    except (NotFound, NotAuthorized):
        return abort(404, _('Dataset not found'))

    log.debug('Reading dataset {}: checking which resources can be previewed'.format(pkg_dict.get('name')))
    # can the resources be previewed?
    for resource in pkg_dict['resources']:
        resource_views = [] if resource.get('in_quarantine') is True else get_action('resource_view_list')(context, {
            'id': resource['id']})
        resource['has_views'] = bool(_find_default_view(resource, resource_views))
        resource['resource_views'] = resource_views

        # if helpers.is_ckan_domain(resource['url']):
        #     resource['url'] = helpers.make_url_relative(resource['url'])
        #
        # if resource.get('perma_link') and helpers.is_ckan_domain(resource['perma_link']):
        #     resource['perma_link'] = helpers.make_url_relative(resource['perma_link'])

    # dealing with resource grouping
    resource_grouping.set_show_groupings_flag(pkg_dict)
    if pkg_dict.get('x_show_grouping'):
        resource_grouping.add_other_grouping_if_needed(pkg_dict)

    package_type = pkg_dict['type'] or 'dataset'
    _setup_template_variables(context, {'id': id}, package_type=package_type)

    # package_saver.PackageSaver().render_package(c.pkg_dict, context)

    log.debug('Reading dataset {}: setting analytics data for template'.format(pkg_dict.get('name')))
    # set dataset type for google analytics - modified by HDX
    analytics_is_cod = analytics.is_cod(pkg_dict)
    # c.analytics_is_indicator = analytics.is_indicator(c.pkg_dict)
    analytics_is_archived = analytics.is_archived(pkg_dict)
    analytics_group_names, analytics_group_ids = analytics.extract_locations_in_json(pkg_dict)
    analytics_dataset_availability = analytics.dataset_availability(pkg_dict)

    # changes done for indicator
    act_data_dict = {'id': pkg_dict['id'], 'limit': 7}
    log.debug('Reading dataset {}: getting activity list for dataset'.format(pkg_dict.get('name')))

    hdx_activities = get_action(u'package_activity_list')(context, act_data_dict)

    pkg_dict['approx_total_downloads'] = find_approx_download(pkg_dict.get('total_res_downloads', 0))

    # Constructing the email body
    log.debug('Reading dataset {}: constructing email body'.format(pkg_dict.get('name')))
    notes = pkg_dict.get('notes') if pkg_dict.get('notes') else _('No description available')
    pkg_dict['social_mail_body'] = _('Description:%0D%0A') + h.markdown_extract(
        notes) + ' %0D%0A'

    membership = membership_data.get_membership_by_user(g.user or g.author, pkg.owner_org, g.userobj)

    user_has_edit_rights = h.check_access('package_update', {'id': pkg_dict['id']})

    # analytics charts
    log.debug('Reading dataset {}: getting data for analytics charts'.format(pkg_dict.get('name')))
    downloads_last_weeks = fetch_downloads_per_week_for_dataset(pkg_dict['id']).values()
    stats_downloads_last_weeks = list(downloads_last_weeks)

    # tags&custom_pages
    log.debug('Reading dataset {}: finding custom page list for this dataset'.format(pkg_dict.get('name')))
    pkg_dict['page_list'] = cp_h.hdx_get_page_list_for_dataset(context, pkg_dict)

    # links to vizualizations
    log.debug('Reading dataset {}: finding links list for this dataset'.format(pkg_dict.get('name')))
    pkg_dict['links_list'] = get_action('hdx_package_links_by_id_list')(context, {'id': pkg_dict.get('name')})

    log.debug('Reading dataset {}: deciding on the dataset visualization/preview'.format(pkg_dict.get('name')))
    _dataset_preview = None
    if 'resources' in pkg_dict:
        _dataset_preview = pkg_dict.get('dataset_preview', vd._DATASET_PREVIEW_FIRST_RESOURCE)

    org_dict = pkg_dict.get('organization') or {}
    org_id = org_dict.get('id', None)
    org_info_dict = _get_org_extras(org_id)
    user_survey_url = org_info_dict.get('user_survey_url')
    pkg_dict['user_survey_url'] = user_survey_url
    if org_info_dict.get('custom_org', False):
        logo_config = _process_customizations(org_info_dict.get('customization', None))
    else:
        logo_config = {}

    template_data = {
        'pkg_dict': pkg_dict,
        'pkg': pkg,
        'showcase_list': showcase_list,
        'hdx_activities': hdx_activities,
        'membership': membership,
        'user_has_edit_rights': user_has_edit_rights,
        'analytics_is_cod': analytics_is_cod,
        'analytics_is_indicator': 'false',
        'analytics_is_archived': analytics_is_archived,
        'analytics_group_names': analytics_group_names,
        'analytics_group_ids': analytics_group_ids,
        'analytics_dataset_availability': analytics_dataset_availability,
        'stats_downloads_last_weeks': stats_downloads_last_weeks,
        'user_survey_url': user_survey_url,
        'logo_config': logo_config,
    }

    if _dataset_preview != vd._DATASET_PREVIEW_NO_PREVIEW:
        view_enabled_resources = [r for r in pkg_dict['resources'] if
                                  r.get('no_preview') != 'true' and r.get('in_quarantine') is not True]
        dataset_preview_enabled_list = []
        dataset_preview_disabled_list = []
        if _dataset_preview == vd._DATASET_PREVIEW_RESOURCE_ID:
            for r in view_enabled_resources:
                if r.get('dataset_preview_enabled') is True:
                    dataset_preview_enabled_list.append(r)
                else:
                    dataset_preview_disabled_list.append(r)
            dataset_preview_enabled_list.extend(dataset_preview_disabled_list)
            view_enabled_resources = dataset_preview_enabled_list
        for r in view_enabled_resources:
            _res_view = _check_resource(r)
            if _res_view is None:
                continue
            if _res_view.get('type') == 'hdx_geo_preview':
                template_data['shapes'] = json.dumps(
                    dataset_view_logic.process_shapes(pkg_dict['resources'], r.get('id')))
                return render('package/hdx-read-shape.html', template_data)
            if _res_view.get('type') == 'hdx_hxl_preview':
                template_data['default_view'] = _res_view
                has_modify_permission = authz.is_authorized_boolean('package_update', context, {'id': pkg_dict['id']})
                template_data['hxl_preview_urls'] = {
                    'onlyView': get_action('hxl_preview_iframe_url_show')({
                        'has_modify_permission': has_modify_permission
                    }, {
                        'resource': _res_view.get('resource'),
                        'resource_view': _res_view.get('view'),
                        'hxl_preview_mode': 'onlyView'
                    })
                    # 'edit': get_action('hxl_preview_iframe_url_show')({}, {
                    #     'resource': _default_view.get('resource'),
                    #     'resource_view': _default_view.get('view'),
                    #     'hxl_preview_mode': 'edit'
                    # })
                }
                return render('package/hdx-read-hxl.html', template_data)

    log.debug('Reading dataset {}: rendering template'.format(pkg_dict.get('name')))
    if org_info_dict.get('custom_org', False):
        return render('package/custom_hdx_read.html', template_data)
    return render('package/hdx_read.html', template_data)

def _get_org_extras(org_id):
    """
    Get the extras for our orgs
    """
    if not org_id:
        return {}
    context = {'model': model, 'session': model.Session,
               'user': g.user or g.author,
               'include_datasets': False,
               'for_view': True}
    data_dict = {'id': org_id}
    org_info = get_action(
        'hdx_light_group_show')(context, data_dict)

    extras_dict = {item['key']: item['value'] for item in org_info.get('extras', {})}
    extras_dict['image_url'] = org_info.get('image_url', None)

    return extras_dict


def _process_customizations(json_string):
    """
    Process settings for datasets belonging to custom layouts
    """
    logo_config = {
        'logo_bg_color': '',
        'highlight_color': '',
        'custom_org': True
    }
    if json_string:
        custom_dict = json.loads(json_string)
        highlight_color = custom_dict.get('highlight_color', None)
        if highlight_color:
            logo_config['highlight_color'] = highlight_color
        logo_bg_color = custom_dict.get('logo_bg_color', None)
        if logo_bg_color:
            logo_config['logo_bg_color'] = logo_bg_color
        image_name = custom_dict.get('image_rect', None)
        if image_name:
            logo_config['image_url'] = h.url_for('hdx_local_image_server.org_file', filename=image_name)

    return logo_config
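
# Illustrative example (not taken from the HDX codebase; colour values and file name are made up):
# given an organization "customization" JSON string such as
#     '{"highlight_color": "#0077be", "logo_bg_color": "#ffffff", "image_rect": "org-logo.png"}'
# _process_customizations() would return roughly
#     {'logo_bg_color': '#ffffff', 'highlight_color': '#0077be', 'custom_org': True,
#      'image_url': h.url_for('hdx_local_image_server.org_file', filename='org-logo.png')}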


def _find_default_view(resource, resource_views):
    default_resource_view = resource_views[0] if resource_views else None
    res_format = (resource.get('format', None) or '').lower()
    if 'xlsx' in res_format:
        default_resource_view = next(
            (rv for rv in resource_views if rv.get('view_type') == 'hdx_hxl_preview'),
            default_resource_view)

    return default_resource_view


def _check_resource(resource):
    shape_info = dataset_view_logic.has_shape_info(resource)
    if shape_info:
        return shape_info
    hxl_preview = _has_hxl_views(resource)
    if hxl_preview:
        return hxl_preview
    return None


def _has_hxl_views(resource):
    for view in resource.get("resource_views"):
        if view.get("view_type") == 'hdx_hxl_preview':
            return {
                'type': 'hdx_hxl_preview',
                "view_url": h.url_for("resource_view", id=view.get('package_id'),
                                      resource_id=view.get('resource_id'), view_id=view.get('id')),
                "view": view,
                "resource": resource
            }
    return None


def delete(id):
    """
    Delete package, HDX changed the redirection point
    """
    if 'cancel' in request.params:
        h.redirect_to(controller='package', action='edit', id=id)

    context = {'model': model, 'session': model.Session,
               'user': g.user or g.author, 'auth_user_obj': g.userobj}

    try:
        check_access('package_delete', context, {'id': id})
    except NotAuthorized:
        return abort(403, _('Unauthorized to delete package %s') % '')

    try:
        if request.method == 'POST':
            get_action('hdx_dataset_purge')(context, {'id': id})  # Create new action to fully delete
            h.flash_notice(_('Dataset has been deleted.'))
            return h.redirect_to('dashboard.datasets')
        pkg_dict = get_action('package_show')(context, {'id': id})
        # dataset_type = pkg_dict['type'] or 'dataset'
    except NotAuthorized:
        return abort(403, _('Unauthorized to delete package %s') % '')
    except NotFound:
        return abort(404, _('Dataset not found'))
    return render('package/confirm_delete.html', {'pkg_dict': pkg_dict})


def package_metadata(id):
    '''
        Handles downloading .CSV and .JSON package metadata files

        :returns: json or csv file
    '''

    context = {
        'model': model,
        'session': model.Session,
        'user': g.user or g.author,
        'auth_user_obj': g.userobj
    }

    # check if package exists
    try:
        pkg_dict = get_action('package_show')(context, {'id': id})

        metadata_fields = PACKAGE_METADATA_FIELDS_MAP
        metadata_resource_fields = RESOURCE_METADATA_FIELDS_MAP

        # limit fields
        metadata = {field: pkg_dict[field] if field in pkg_dict else None for field, field_data in
                    metadata_fields.items()}

        file_format = request.params.get('format', '')
        filename = 'metadata-%s' % metadata.get('name')

        # add resources
        metadata['resources'] = []
        for r in pkg_dict['resources']:
            output_resource_dict = {field: r[field] if field in r else None for field, field_data in
                                    metadata_resource_fields.items()}
            # extra processing
            output_resource_dict = _process_resource_metadata(output_resource_dict, metadata_resource_fields,
                                                              file_format)
            metadata['resources'].append(output_resource_dict)

        # process fields
        metadata = _process_dataset_metadata(metadata, metadata_fields, file_format)

        # analytics.MetadataDownloadAnalyticsSender(file_format=file_format, package_id=id).send_to_queue()

        buf = io.StringIO()
        if 'json' in file_format:
            json.dump(metadata, buf, indent=4)

            output = make_response(buf.getvalue())
            output.headers['Content-Type'] = 'application/json'
            output.headers['Content-Disposition'] = 'attachment; filename="%s.json"' % filename

            return output
        elif 'csv' in file_format:
            writer = csv.writer(buf)

            # header
            writer.writerow(['Field', 'Label', 'Value'])

            # content
            for k, v in metadata.items():
                label_value = metadata_fields[k] if k in metadata_fields else metadata_resource_fields[
                    re.sub('^resource_([0-9]+)_', '', k)] if k.startswith('resource_') else None
                writer.writerow([k, label_value, v])

            output = make_response(buf.getvalue())
            output.headers['Content-Type'] = 'text/csv'
            output.headers['Content-Disposition'] = 'attachment; filename="%s.csv"' % filename

            return output
    except NotFound:
        return abort(404, _('Dataset not found'))
    except NotAuthorized:
        return abort(404, _('Dataset not found'))

    return abort(404, _('Invalid file format'))


def resource_metadata(id, resource_id):
    '''
        Handles downloading .CSV and .JSON resource metadata files

        :returns: json or csv file
    '''

    context = {
        'model': model,
        'session': model.Session,
        'user': g.user or g.author,
        'auth_user_obj': g.userobj
    }

    # check if resource exists
    try:
        resource_dict = get_action('resource_show')(context, {'id': resource_id})

        metadata_fields = RESOURCE_METADATA_FIELDS_MAP

        # limit fields
        metadata = {field: resource_dict[field] if field in resource_dict else None for field, field_data in
                    metadata_fields.items()}

        file_format = request.params.get('format', '')
        filename = 'metadata-%s' % h.hdx_munge_title(metadata.get('name'))

        # process fields
        metadata = _process_resource_metadata(metadata, metadata_fields, file_format)

        # analytics.MetadataDownloadAnalyticsSender(file_format=file_format, resource_id=resource_id).send_to_queue()

        buf = io.StringIO()
        if 'json' in file_format:
            json.dump(metadata, buf, indent=4)

            output = make_response(buf.getvalue())
            output.headers['Content-Type'] = 'application/json'
            output.headers['Content-Disposition'] = 'attachment; filename="%s.json"' % filename

            return output
        elif 'csv' in file_format:
            writer = csv.writer(buf)

            # header
            writer.writerow(['Field', 'Label', 'Value'])

            # content
            for k, v in metadata.items():
                writer.writerow([k, metadata_fields[k] if k in metadata_fields else None, v])

            output = make_response(buf.getvalue())
            output.headers['Content-Type'] = 'text/csv'
            output.headers['Content-Disposition'] = 'attachment; filename="%s.csv"' % filename

            return output
    except NotFound:
        return abort(404, _('Resource not found'))
    except NotAuthorized:
        return abort(404, _('Resource not found'))
    return abort(404, _('Invalid file format'))
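
# Usage sketch (assumes a reachable HDX/CKAN instance; the base URL, dataset id and resource id
# below are placeholders): the download_metadata routes registered at the bottom of this module
# read a ?format= query parameter, so the metadata endpoints can be exercised with plain HTTP, e.g.:
#
#     import requests
#     base = 'https://example-hdx-instance.org'
#     pkg_meta = requests.get(base + '/dataset/some-dataset-id/download_metadata',
#                             params={'format': 'json'}).json()
#     res_csv = requests.get(base + '/dataset/some-dataset-id/resource/some-resource-id/download_metadata',
#                            params={'format': 'csv'}).text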


def _normalize_metadata_lists(old_dict: dict) -> dict:
    new_dict = {}

    for dict_key, dict_value in old_dict.items():
        if isinstance(dict_value, list):
            for list_key, list_value in enumerate(dict_value, start=1):
                if isinstance(list_value, dict):
                    for k, v in list_value.items():
                        new_dict['%s_%s_%s' % (dict_key, list_key, k)] = v
                else:
                    new_dict[dict_key] = ', '.join(dict_value)
        else:
            new_dict[dict_key] = dict_value

    return new_dict
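
# Illustrative example (values made up): _normalize_metadata_lists flattens list values so they can
# be written as single CSV rows. An input such as
#     {'tags': ['health', 'education'],
#      'resource': [{'name': 'data.csv', 'format': 'CSV'}]}
# comes back roughly as
#     {'tags': 'health, education',
#      'resource_1_name': 'data.csv', 'resource_1_format': 'CSV'}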


def _process_dataset_metadata(metadata_dict: dict, fields: dict, file_format: str) -> dict:
    if 'notes' in fields:
        metadata_dict['notes'] = markdown_extract_strip(metadata_dict.get('notes'), 0)
    if 'organization' in fields:
        metadata_dict['organization'] = metadata_dict.get('organization').get('title')
    if 'data_update_frequency' in fields:
        data_update_frequency_value = h.hdx_get_frequency_by_value(metadata_dict.get('data_update_frequency'))
        metadata_dict['data_update_frequency'] = data_update_frequency_value or metadata_dict.get(
            'data_update_frequency')
    if 'groups' in fields:
        metadata_dict['groups'] = [group['display_name'] for group in metadata_dict.get('groups')]
    if 'tags' in fields:
        metadata_dict['tags'] = [tag['display_name'] for tag in metadata_dict.get('tags')]

    if 'csv' in file_format:
        # rename keys
        if 'resources' in metadata_dict:
            metadata_dict['resource'] = metadata_dict['resources']
            del metadata_dict['resources']

        metadata_dict = _normalize_metadata_lists(metadata_dict)

    return metadata_dict


def _process_resource_metadata(metadata_dict: dict, fields: dict, file_format: str) -> dict:
    if 'size' in fields:
        metadata_dict['size'] = filesize_format(metadata_dict.get('size'))
    if 'description' in fields:
        metadata_dict['description'] = markdown_extract_strip(metadata_dict.get('description'), 0)

    # rename keys
    if 'package_id' in fields:
        metadata_dict['dataset_id'] = metadata_dict['package_id']
        del metadata_dict['package_id']

    return metadata_dict


class DatasetContactContributorView(MethodView):

    def post(self, id: str) -> Union[Response, str]:
        context = {
            u'model': model,
            u'session': model.Session,
            u'user': g.user or g.author,
            u'auth_user_obj': g.userobj,
        }

        try:
            pkg_dict = get_action('package_show')(context, {'id': id})

            if pkg_dict.get('is_requestdata_type'):
                return abort(404, _('Dataset not found'))

            check_access(u'hdx_send_mail_contributor', context)

            dataset_contact_contributor_logic = DatasetContactContributorLogic(context, request)

            data_dict = None
            try:
                data_dict = dataset_contact_contributor_logic.read()
            except dictization_functions.DataError:
                abort(400, _(u'Integrity Error'))

            data, errors = dataset_contact_contributor_logic.validate(data_dict)
            if errors:
                return self.get(id, data, errors)

            usr_h.is_valid_captcha(request.form.get('g-recaptcha-response'))

            dataset_contact_contributor_logic.send_mail()

            analytics_dict = h.hdx_compute_analytics(pkg_dict)

            extra_vars = {
                u'pkg_dict': pkg_dict,
                u'analytics': analytics_dict,
                u'message_subject': request.form.get('topic'),
                u'message_sent': True,
            }
            return render('package/contact_contributor.html', extra_vars=extra_vars)

        except NotFound:
            return abort(404, _('Dataset not found'))

        except NotAuthorized:
            came_from = h.url_for('hdx_dataset.contact_contributor', id=pkg_dict.get('name'))
            return redirect(h.url_for('hdx_signin.login', info_message_type='contact-contributor', came_from=came_from))

        except captcha.CaptchaError:
            error_summary = _(u'Bad Captcha. Please try again.')
            log.error(error_summary)
            return self.get(id, data, errors, error_summary)

        except MailerException as e:
            error_summary = _('Could not send request for: %s') % text_type(e)
            log.error(error_summary)
            return self.get(id, data, errors, error_summary)

        except ValidationError as e:
            error_summary = e.error_summary
            log.error(error_summary)
            return self.get(id, data, errors, error_summary)

        except Exception as e:
            error_summary = _('Request can not be sent. Contact an administrator')
            log.error(error_summary)
            return self.get(id, data, errors, error_summary)

    def get(self, id: str,
            data: Optional[dict[str, Any]] = None,
            errors: Optional[dict[str, Any]] = None,
            error_summary: Optional[str] = None):
        context = {
            u'model': model,
            u'session': model.Session,
            u'user': g.user or g.author,
            u'auth_user_obj': g.userobj,
        }

        try:
            pkg_dict = get_action('package_show')(context, {'id': id})

            if pkg_dict.get('is_requestdata_type'):
                return abort(404, _('Dataset not found'))

            check_access(u'hdx_send_mail_contributor', context)

            analytics_dict = h.hdx_compute_analytics(pkg_dict)

            extra_vars = {
                u'pkg_dict': pkg_dict,
                u'analytics': analytics_dict,
                u'contact_topics': contributor_topics,
                u'data': data or {},
                u'errors': errors or {},
                u'error_summary': error_summary or '',
            }
            return render('package/contact_contributor.html', extra_vars=extra_vars)

        except NotFound:
            return abort(404, _('Dataset not found'))

        except NotAuthorized:
            came_from = h.url_for('hdx_dataset.contact_contributor', id=pkg_dict.get('name'))
            return redirect(h.url_for('hdx_signin.login', info_message_type='contact-contributor', came_from=came_from))


class DatasetRequestAccessView(MethodView):

    def post(self, id: str) -> Union[Response, str]:
        context = {
            u'model': model,
            u'session': model.Session,
            u'user': g.user or g.author,
            u'auth_user_obj': g.userobj,
        }

        try:
            pkg_dict = get_action('package_show')(context, {'id': id})

            check_access(u'hdx_request_access', context)

            if not pkg_dict.get('is_requestdata_type'):
                return abort(404, _('Dataset not request data type'))

            pending_request = h.hdx_pending_request_data(g.userobj.id, pkg_dict.get('id'))
            if len(pending_request) > 0:
                return redirect('hdx_dataset.request_access', id=pkg_dict.get('name'))

            dataset_request_access_logic = DatasetRequestAccessLogic(context, request)

            data_dict = None
            try:
                data_dict = dataset_request_access_logic.read()
            except dictization_functions.DataError:
                abort(400, _(u'Integrity Error'))

            data, errors = dataset_request_access_logic.validate(data_dict)
            if errors:
                return self.get(id, data, errors)

            request_sent, send_request_message = dataset_request_access_logic.send_request()

            if request_sent:
                analytics_dict = h.hdx_compute_analytics(pkg_dict)

                extra_vars = {
                    u'pkg_dict': pkg_dict,
                    u'analytics': analytics_dict,
                    u'request_sent': request_sent,
                }
                return render('package/request_access.html', extra_vars=extra_vars)
            else:
                error_summary = send_request_message
                log.error(error_summary)
                return self.get(id, data, errors, error_summary)

        except NotFound:
            return abort(404, _('Dataset not found'))

        except NotAuthorized:
            came_from = h.url_for('hdx_dataset.request_access', id=pkg_dict.get('name'))
            return redirect(h.url_for('hdx_signin.login', info_message_type='hdx-connect', came_from=came_from))

        except MailerException as e:
            error_summary = _('Could not send request for: %s') % text_type(e)
            log.error(error_summary)
            return self.get(id, data, errors, error_summary)

        except ValidationError as e:
            error_summary = e.error_summary
            log.error(error_summary)
            return self.get(id, data, errors, error_summary)

        except Exception as e:
            error_summary = _('Request can not be sent. Contact an administrator')
            log.error(error_summary)
            return self.get(id, data, errors, error_summary)

    def get(self, id: str,
            data: Optional[dict[str, Any]] = None,
            errors: Optional[dict[str, Any]] = None,
            error_summary: Optional[str] = None):
        context = {
            u'model': model,
            u'session': model.Session,
            u'user': g.user or g.author,
            u'auth_user_obj': g.userobj,
        }

        try:
            pkg_dict = get_action('package_show')(context, {'id': id})

            check_access(u'hdx_request_access', context)

            if not pkg_dict.get('is_requestdata_type'):
                return abort(404, _('Dataset not request data type'))

            pending_request = h.hdx_pending_request_data(g.userobj.id, pkg_dict.get('id'))
            if pending_request:
                if not error_summary:
                    error_summary = _('You already have a pending request. Please wait for the reply.')

            org_dict = get_action(u'organization_show')(context, {'id': pkg_dict.get('organization', {}).get('id')})
            set_custom_rect_logo_url(org_dict)

            analytics_dict = h.hdx_compute_analytics(pkg_dict)

            extra_vars = {
                u'pkg_dict': pkg_dict,
                u'analytics': analytics_dict,
                u'org_dict': org_dict,
                u'pending_request': pending_request,
                u'data': data or {},
                u'errors': errors or {},
                u'error_summary': error_summary or '',
            }
            return render('package/request_access.html', extra_vars=extra_vars)

        except NotFound:
            return abort(404, _('Dataset not found'))

        except NotAuthorized:
            came_from = h.url_for('hdx_dataset.request_access', id=pkg_dict.get('name'))
            return redirect(h.url_for('hdx_signin.login', info_message_type='hdx-connect', came_from=came_from))


hdx_search.add_url_rule(u'/', view_func=search, strict_slashes=False)
hdx_dataset.add_url_rule(u'/', view_func=search, strict_slashes=False)
hdx_dataset.add_url_rule(u'<id>', view_func=read)
hdx_dataset.add_url_rule(u'/delete/<id>', view_func=delete, methods=[u'GET', u'POST'])
hdx_dataset.add_url_rule(u'/<id>/contact/',
                         view_func=DatasetContactContributorView.as_view(str(u'contact_contributor')),
                         methods=[u'GET', u'POST'], strict_slashes=False)
hdx_dataset.add_url_rule(u'/<id>/request-access/',
                         view_func=DatasetRequestAccessView.as_view(str(u'request_access')),
                         methods=[u'GET', u'POST'], strict_slashes=False)
hdx_dataset.add_url_rule(u'<id>/download_metadata', view_func=package_metadata)
hdx_dataset.add_url_rule(u'<id>/resource/<resource_id>/download_metadata', view_func=resource_metadata)
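
The two blueprints defined in this module only take effect once a CKAN plugin exposes them. The snippet below is a minimal sketch of that wiring, assuming CKAN's standard IBlueprint plugin interface; the plugin class name is illustrative, and the real HDX plugin that registers these blueprints is not part of this file.

import ckan.plugins as plugins

from ckanext.hdx_package.views.dataset import hdx_dataset, hdx_search


class ExampleHdxDatasetViewsPlugin(plugins.SingletonPlugin):
    # Illustrative plugin; the actual HDX plugin wiring lives elsewhere in the extension.
    plugins.implements(plugins.IBlueprint)

    def get_blueprint(self):
        # CKAN accepts a single Blueprint or a list of Blueprints from IBlueprint plugins.
        return [hdx_dataset, hdx_search]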