zopefoundation / Products.CMFCore / 6246931310

20 Sep 2023 09:54AM UTC coverage: 86.008% (-0.3%) from 86.266%
Pull Request #131: Make decodeFolderFilter and encodeFolderFilter non-public.

mauritsvanrees: gha: don't need setup-python on 27 as we use the 27 container.

2466 of 3689 branches covered (66.85%)

Branch coverage included in aggregate %.

6 of 6 new or added lines in 1 file covered. (100.0%)

17297 of 19289 relevant lines covered (89.67%)

0.9 hits per line

Source File

/src/Products/CMFCore/exportimport/content.py: 93.56% covered
##############################################################################
#
# Copyright (c) 2005 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Filesystem exporter / importer adapters.
"""

import itertools
import operator
from csv import reader
from csv import writer

import six
from six import StringIO
from six.moves.configparser import ConfigParser

from DateTime import DateTime
from zope.component import getUtility
from zope.interface import implementer
from zope.publisher.interfaces.http import MethodNotAllowed

from Products.GenericSetup.content import DAVAwareFileAdapter
from Products.GenericSetup.content import _globtest
from Products.GenericSetup.interfaces import IFilesystemExporter
from Products.GenericSetup.interfaces import IFilesystemImporter

from ..interfaces import ITypesTool


#
#   setup_tool handlers
#
def exportSiteStructure(context):
    IFilesystemExporter(context.getSite()).export(context, 'structure', True)


def importSiteStructure(context):
    IFilesystemImporter(context.getSite()).import_(context, 'structure', True)


def encode_if_needed(text, encoding):
    if six.PY2:
        if isinstance(text, six.text_type):
            text = text.encode(encoding)
    else:
        if not isinstance(text, six.text_type):
            text = text.decode(encoding)
        # no need to encode;
        # let's avoid double encoding in case of encoded string
    return text


class FolderishDAVAwareFileAdapter(DAVAwareFileAdapter):
    """ A version of the DAVAwareFileAdapter that uses .properties to store
    the DAV result, rather than its own id. For use in serialising folderish
    objects. """

    def _getFileName(self):
        """ Return the name under which our file data is stored.
        """
        return '.properties'


#
#   Filesystem export/import adapters
#
@implementer(IFilesystemExporter, IFilesystemImporter)
class StructureFolderWalkingAdapter(object):
    """ Tree-walking exporter for "folderish" types.

    Folderish instances are mapped to directories within the 'structure'
    portion of the profile, where the folder's relative path within the site
    corresponds to the path of its directory under 'structure'.

    The subobjects of a folderish instance are enumerated in the '.objects'
    file in the corresponding directory.  This file is a CSV file, with one
    row per subobject, with the following structure::

     "<subobject id>","<subobject portal_type>"

    Subobjects themselves are represented as individual files or
    subdirectories within the parent's directory.
    If the import step finds that any objects specified to be created by the
    'structure' directory setup already exist, these objects will be deleted
    and then recreated by the profile.  The existence of a '.preserve' file
    within the 'structure' hierarchy allows specification of objects that
    should not be deleted.  '.preserve' files should contain one preserve
    rule per line, with shell-style globbing supported (i.e. 'b*' will match
    all objects with an id starting with 'b').

    Similarly, a '.delete' file can be used to specify the deletion of any
    objects that exist in the site but are NOT in the 'structure' hierarchy,
    and thus will not be recreated during the import process.
    """

    def __init__(self, context):
        self.context = context
        self._encoding = self.context.getProperty('default_charset', 'utf-8')

    def read_data_file(self, import_context, datafile, subdir):
        out = import_context.readDataFile(datafile, subdir)
        if out is None:
            return out
        return encode_if_needed(out, self._encoding)

    def export(self, export_context, subdir, root=False):
        """ See IFilesystemExporter.
        """
        content_type = 'text/comma-separated-values'

        # Enumerate exportable children
        exportable = self.context.contentItems()
        exportable = [x + (IFilesystemExporter(x, None),) for x in exportable]
        exportable = [x for x in exportable if x[1] is not None]

        objects_stream = StringIO()
        objects_csv_writer = writer(objects_stream)
        wf_stream = StringIO()
        wf_csv_writer = writer(wf_stream)

        if not root:
            subdir = '%s/%s' % (subdir, self.context.getId())

        try:
            wft = self.context.portal_workflow
        except AttributeError:
            # No workflow tool to export definitions from
            for object_id, object, ignored in exportable:
                objects_csv_writer.writerow((object_id,
                                             object.getPortalTypeName()))
        else:
            for object_id, object, ignored in exportable:
                objects_csv_writer.writerow((object_id,
                                             object.getPortalTypeName()))

                workflows = wft.getWorkflowsFor(object)
                for workflow in workflows:
                    workflow_id = workflow.getId()
                    state_variable = workflow.state_var
                    state_record = wft.getStatusOf(workflow_id, object)
                    if state_record is None:
                        continue
                    state = state_record.get(state_variable)
                    wf_csv_writer.writerow((object_id, workflow_id, state))

            export_context.writeDataFile('.workflow_states',
                                         text=wf_stream.getvalue(),
                                         content_type=content_type,
                                         subdir=subdir)

        export_context.writeDataFile('.objects',
                                     text=objects_stream.getvalue(),
                                     content_type=content_type,
                                     subdir=subdir)

        parser = ConfigParser()

        title = self.context.Title()
        description = self.context.Description()
        # encode if needed; ConfigParser does not support unicode !
        title_str = encode_if_needed(title, self._encoding)
        description_str = encode_if_needed(description, self._encoding)
        parser.set('DEFAULT', 'Title', title_str)
        parser.set('DEFAULT', 'Description', description_str)

        stream = StringIO()
        parser.write(stream)

        try:
            FolderishDAVAwareFileAdapter(self.context).export(export_context,
                                                              subdir, root)
        except (AttributeError, MethodNotAllowed):
            export_context.writeDataFile('.properties',
                                         text=stream.getvalue(),
                                         content_type='text/plain',
                                         subdir=subdir)

        for id, object in self.context.objectItems():

            adapter = IFilesystemExporter(object, None)

            if adapter is not None:
                adapter.export(export_context, subdir)

    def import_(self, import_context, subdir, root=False):
        """ See IFilesystemImporter.
        """
        context = self.context
        if not root:
            subdir = '%s/%s' % (subdir, context.getId())

        objects = self.read_data_file(import_context, '.objects', subdir)
        workflow_states = self.read_data_file(import_context,
                                              '.workflow_states', subdir)
        if objects is None:
            return

        dialect = 'excel'
        object_stream = StringIO(objects)
        wf_stream = StringIO(workflow_states)

        object_rowiter = reader(object_stream, dialect)
        ours = [_f for _f in tuple(object_rowiter) if _f]
        our_ids = set([item[0] for item in ours])

        prior = set(context.contentIds())

        preserve = self.read_data_file(import_context, '.preserve', subdir)
        if not preserve:
            preserve = set()
        else:
            preservable = prior.intersection(our_ids)
            preserve = set(_globtest(preserve, preservable))

        delete = self.read_data_file(import_context, '.delete', subdir)
        if not delete:
            delete = set()
        else:
            deletable = prior.difference(our_ids)
            delete = set(_globtest(delete, deletable))

        # if it's in our_ids and NOT in preserve, or if it's not in
        # our_ids but IS in delete, we're gonna delete it
        delete = our_ids.difference(preserve).union(delete)

        for id in prior.intersection(delete):
            context._delObject(id)

        existing = context.objectIds()

        for object_id, portal_type in ours:

            if object_id not in existing:
                object = self._makeInstance(object_id, portal_type,
                                            subdir, import_context)
                if object is None:
                    logger = import_context.getLogger('SFWA')
                    logger.warning("Couldn't make instance: %s/%s" %
                                   (subdir, object_id))
                    continue

            wrapped = context._getOb(object_id)

            IFilesystemImporter(wrapped).import_(import_context, subdir)

        if workflow_states is not None:
            existing = context.objectIds()
            wft = context.portal_workflow
            wf_rowiter = reader(wf_stream, dialect)
            wf_by_objectid = itertools.groupby(wf_rowiter,
                                               operator.itemgetter(0))

            for object_id, states in wf_by_objectid:
                if object_id not in existing:
                    logger = import_context.getLogger('SFWA')
                    logger.warning("Couldn't set workflow for object %s/%s, it"
                                   " doesn't exist" % (context.id, object_id))
                    continue

                object = context[object_id]
                for object_id, workflow_id, state_id in states:
                    workflow = wft.getWorkflowById(workflow_id)
                    state_variable = workflow.state_var
                    wf_state = {'action': None,
                                'actor': None,
                                'comments': 'Setting state to %s' % state_id,
                                state_variable: state_id,
                                'time': DateTime()}

                    wft.setStatusOf(workflow_id, object, wf_state)
                    workflow.updateRoleMappingsFor(object)

                object.reindexObject()

    def _makeInstance(self, id, portal_type, subdir, import_context):

        context = self.context
        subdir = '%s/%s' % (subdir, id)
        properties = self.read_data_file(import_context, '.properties',
                                         subdir)
        tool = getUtility(ITypesTool)

        try:
            tool.constructContent(portal_type, context, id)
        except ValueError:  # invalid type
            return None

        content = context._getOb(id)

        if properties is not None:
            if '[DEFAULT]' not in properties:
                try:
                    adp = FolderishDAVAwareFileAdapter
                    adp(content).import_(import_context, subdir)
                    return content
                except (AttributeError, MethodNotAllowed):
                    # Fall through to old implementation below
                    pass

            lines = properties.splitlines()

            stream = StringIO('\n'.join(lines))
            parser = ConfigParser(defaults={'title': '',
                                            'description': 'NONE'})
            try:
                parser.read_file(stream)
            except AttributeError:  # Python 2
                parser.readfp(stream)

            title = parser.get('DEFAULT', 'title')
            description = parser.get('DEFAULT', 'description')

            content.setTitle(title)
            content.setDescription(description)

        return content
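
# ---------------------------------------------------------------------------
# Illustrative sketch only (not part of content.py): a minimal 'structure'
# directory of the kind the adapters above export and import, assuming a
# site with a folder 'news' containing one Document.  The file names come
# from the class docstring and code above; the concrete ids, portal types
# and workflow name are hypothetical.
#
#   structure/
#       .objects             "news","Folder"
#       .properties          [DEFAULT] section holding Title / Description
#       news/
#           .objects         "welcome","Document"
#           .workflow_states "welcome","simple_workflow","published"
#           .preserve        b*     (existing ids matching 'b*' are kept)
#           .delete          old-*  (site ids matching 'old-*' that are not
#                                    listed in .objects are removed on import)
# ---------------------------------------------------------------------------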