/* * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.xwiki.contrib.confluence.filter.internal.input; import java.io.*; import java.util.*; import java.util.regex.Pattern; import javax.inject.Inject; import javax.inject.Named; import javax.inject.Provider; import com.xpn.xwiki.XWiki; import org.apache.commons.configuration2.ex.ConfigurationException; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; import org.slf4j.Logger; import org.xwiki.bridge.DocumentAccessBridge; import org.xwiki.component.annotation.Component; import org.xwiki.component.annotation.InstantiationStrategy; import org.xwiki.component.descriptor.ComponentInstantiationStrategy; import org.xwiki.contrib.confluence.filter.input.ConfluenceInputContext; import org.xwiki.contrib.confluence.filter.input.ConfluenceInputProperties; import org.xwiki.contrib.confluence.filter.input.ConfluenceProperties; import org.xwiki.contrib.confluence.filter.input.ConfluenceXMLPackage; import org.xwiki.contrib.confluence.filter.internal.ConfluenceFilter; import org.xwiki.contrib.confluence.parser.confluence.internal.ConfluenceParser; import org.xwiki.contrib.confluence.parser.xhtml.ConfluenceXHTMLInputProperties; import org.xwiki.contrib.confluence.parser.xhtml.internal.ConfluenceXHTMLParser; import org.xwiki.contrib.confluence.parser.xhtml.internal.InternalConfluenceXHTMLInputProperties; import org.xwiki.environment.Environment; import org.xwiki.filter.FilterEventParameters; import org.xwiki.filter.FilterException; import org.xwiki.filter.event.model.WikiAttachmentFilter; import org.xwiki.filter.event.model.WikiDocumentFilter; import org.xwiki.filter.event.model.WikiObjectFilter; import org.xwiki.filter.event.user.GroupFilter; import org.xwiki.filter.event.user.UserFilter; import org.xwiki.filter.input.AbstractBeanInputFilterStream; import org.xwiki.filter.input.BeanInputFilterStream; import org.xwiki.filter.input.BeanInputFilterStreamFactory; import org.xwiki.filter.input.InputFilterStreamFactory; import org.xwiki.filter.input.StringInputSource; import org.xwiki.job.event.status.JobProgressManager; import org.xwiki.model.EntityType; import org.xwiki.model.reference.*; import org.xwiki.rendering.listener.Listener; import org.xwiki.rendering.parser.ParseException; import org.xwiki.rendering.parser.StreamParser; import org.xwiki.rendering.renderer.PrintRenderer; import org.xwiki.rendering.renderer.PrintRendererFactory; import org.xwiki.rendering.renderer.printer.DefaultWikiPrinter; import org.xwiki.rendering.syntax.Syntax; import org.xwiki.user.UserManager; import org.xwiki.user.UserReference; import org.xwiki.user.UserReferenceResolver; /** * @version $Id$ * @since 9.0 */ 
@Component
@Named(ConfluenceInputFilterStreamFactory.ROLEHINT)
@InstantiationStrategy(ComponentInstantiationStrategy.PER_LOOKUP)
public class ConfluenceInputFilterStream
    extends AbstractBeanInputFilterStream<ConfluenceInputProperties, ConfluenceFilter>
{
    private static final Pattern FORBIDDEN_USER_CHARACTERS = Pattern.compile("[. /]");

    @Inject
    @Named(ConfluenceParser.SYNTAX_STRING)
    private StreamParser confluenceWIKIParser;

    @Inject
    @Named(ConfluenceXHTMLParser.SYNTAX_STRING)
    private InputFilterStreamFactory confluenceXHTMLParserFactory;

    @Inject
    private Provider<ConfluenceConverterListener> converterProvider;

    @Inject
    private JobProgressManager progress;

    @Inject
    @Named("xwiki/2.1")
    private PrintRendererFactory xwiki21Factory;

    @Inject
    private EntityReferenceSerializer<String> serializer;

    @Inject
    private ConfluenceInputContext context;

    @Inject
    private XWikiConverter converter;

    @Inject
    private ConfluenceXMLPackage confluencePackage;

    @Inject
    private Logger logger;

    @Inject
    private UserReferenceResolver<String> userReferenceResolver;

    @Inject
    private UserManager userManager;

    @Inject
    private DocumentAccessBridge documentAccessBridge;

    @Inject
    private Environment environment;

    @Override
    public void close() throws IOException
    {
        this.properties.getSource().close();
    }

    @Override
    protected void read(Object filter, ConfluenceFilter proxyFilter) throws FilterException
    {
        if (this.context instanceof DefaultConfluenceInputContext) {
            ((DefaultConfluenceInputContext) this.context).set(this.properties);
        }

        try {
            readInternal(filter, proxyFilter);
        } finally {
            if (this.context instanceof DefaultConfluenceInputContext) {
                ((DefaultConfluenceInputContext) this.context).remove();
            }
        }
    }

    private void readInternal(Object filter, ConfluenceFilter proxyFilter) throws FilterException
    {
        // Prepare the package
        try {
            this.confluencePackage.read(this.properties.getSource());
        } catch (Exception e) {
            throw new FilterException("Failed to read package", e);
        }

        Map<Long, List<Long>> pages = this.confluencePackage.getPages();

        if (this.properties.isUsersEnabled()) {
            Collection<Long> users = this.confluencePackage.getInternalUsers();
            // TODO get users in new format (this.confluencePackage.getAllUsers())
            Collection<Long> groups = this.confluencePackage.getGroups();

            this.progress.pushLevelProgress(users.size() + groups.size() + pages.size()
                + pages.entrySet().stream().mapToInt(e -> e.getValue().size()).sum(), this);

            sendUsers(users, groups, proxyFilter);
        } else {
            this.progress.pushLevelProgress(
                pages.size() + pages.entrySet().stream().mapToInt(e -> e.getValue().size()).sum(), this);
        }

        // Generate documents events
        for (Map.Entry<Long, List<Long>> entry : pages.entrySet()) {
            long spaceId = entry.getKey();

            ConfluenceProperties spaceProperties;
            try {
                spaceProperties = this.confluencePackage.getSpaceProperties(spaceId);
            } catch (ConfigurationException e) {
                throw new FilterException("Failed to get space properties", e);
            }

            String spaceKey = toEntityName(ConfluenceXMLPackage.getSpaceKey(spaceProperties));
            FilterEventParameters spaceParameters = new FilterEventParameters();

            // @baerthlein Start
            Collection<Long> spacePermissions = this.confluencePackage.getSpacePermissions(spaceId);
            // if (this.properties.isUsersEnabled())
            // Create the groups and users referenced by the space permissions so that permissions can be assigned
            // to them later on
            try {
                sendSpacePermissionUsersAndGroups(spaceId, spacePermissions, proxyFilter);
            } catch (FilterException e) {
                logger.error("Failed to create groups and users from the Confluence space permissions (space id: "
                    + spaceId + ")", e);
            }

            // > WikiSpace
            proxyFilter.beginWikiSpace(spaceKey, spaceParameters);

            // Collects the Velocity statements for the page restrictions of this space
            StringBuilder script = new StringBuilder();

            // Main page
            Long descriptionId = spaceProperties.getLong(ConfluenceXMLPackage.KEY_SPACE_DESCRIPTION, null);
            if (descriptionId != null) {
                this.progress.startStep(this);
                if (this.properties.isIncluded(descriptionId)) {
                    readPage(descriptionId, spaceKey, filter, proxyFilter, script);
                }
                this.progress.endStep(this);
            }

            // Other pages
            for (long pageId : entry.getValue()) {
                this.progress.startStep(this);
                if (this.properties.isIncluded(pageId)) {
                    readPage(pageId, spaceKey, filter, proxyFilter, script);
                }
                this.progress.endStep(this);
            }

            if (script.length() > 0) {
                script.insert(0, "{{velocity}}\n").append("{{/velocity}}\n");
                String filename = this.environment.getTemporaryDirectory().toPath() + "/pagerestriction-script-"
                    + spaceKey + ".vm";
                // try-with-resources so the stream is also closed when the write fails
                try (FileOutputStream fileStream = new FileOutputStream(filename)) {
                    fileStream.write(script.toString().getBytes());
                    fileStream.flush();
                    this.logger.info("Created page restriction script: " + filename);
                } catch (FileNotFoundException e) {
                    logger.error("File not found error while writing the page restriction script: " + filename, e);
                } catch (IOException e) {
                    logger.error("IO error while writing the page restriction script: " + filename, e);
                }
            }

            try {
                // sendSpacePermissions(proxyFilter, spaceId, spacePermissions) throws an NPE because there is no
                // <spaceKey>.WebPreferences document yet, so create such a document explicitly and persist the
                // global permissions into it.
                // > WikiDocument
                String webPreferences = "WebPreferences"; // relative to this space, e.g. TESTARGESPACE02.WebPreferences
                proxyFilter.beginWikiDocument(webPreferences, FilterEventParameters.EMPTY);

                createSpacePermissionsImportScript(spaceId, spaceKey, spacePermissions, proxyFilter);

                // < WikiDocument
                proxyFilter.endWikiDocument(webPreferences, FilterEventParameters.EMPTY);
            } catch (FilterException e) {
                logger.error("Failed to create the space permissions (space id: " + spaceId + ")", e);
            }

            // < WikiSpace
            proxyFilter.endWikiSpace(spaceKey, spaceParameters);
        }

        this.progress.popLevelProgress(this);

        // Cleanup
        try {
            this.confluencePackage.close();
        } catch (IOException e) {
            throw new FilterException("Failed to close package", e);
        }
    }

    /**
     * Collects the Confluence space permissions and stores them as XWiki.XWikiGlobalRights objects on the
     * WebPreferences document; the referenced users and groups have to exist in order to assign them permissions.
     *
     * @param spaceId confluence space id
     * @param spaceKey confluence space key
     * @param spacePermissions confluence space permissions
     * @param proxyFilter confluence filter
     * @throws FilterException filter error
     */
    private void createSpacePermissionsImportScript(long spaceId, String spaceKey, Collection<Long> spacePermissions,
        ConfluenceFilter proxyFilter) throws FilterException
    {
        // Collect all permissions per user and per group so that as few rights objects as possible are created
        Map<String, List<String>> userPermissions = new HashMap<>();
        Map<String, List<String>> groupPermissions = new HashMap<>();

        for (Long spacePermissionId : spacePermissions) {
            ConfluenceProperties spacePermissionProperties;
            try {
                spacePermissionProperties =
                    this.confluencePackage.getSpacePermissionProperties(spaceId, spacePermissionId);
            } catch (ConfigurationException e) {
                throw new FilterException("Failed to get space permission properties", e);
            }

            final String group = spacePermissionProperties.getString("group");
            final String permission = spacePermissionProperties.getString("type");
            final String xWikiPermission =
convertConfluenceSpacepermission2xWikiPermission(permission); final String user = spacePermissionProperties.getString("userSubject"); if (StringUtils.isEmpty(xWikiPermission)) continue; if (StringUtils.isNoneEmpty(group)){ addPermissionIfNotExists(groupPermissions, group, xWikiPermission); } else if (StringUtils.isNoneEmpty(user)) { final ConfluenceProperties userImplProperties; // look up ConfluenceUserImpl in order to get username aso try { userImplProperties = this.confluencePackage.getUserImplProperties(user); } catch (ConfigurationException e) { throw new FilterException("Failed to get UserImpl properties for user-key: "+ user, e); } String lowerName = userImplProperties.getString(ConfluenceXMLPackage.USERIMPL_LOWERNAME, ""); if (this.properties.isConvertToXWiki()) { if (lowerName.equals("admin")) { lowerName = "Admin"; } } addPermissionIfNotExists(userPermissions, lowerName, xWikiPermission); } } String globalPermissionDocument = "WebPreferences"; //spaceKey + ".WebPreferences"; // eg. TESTARGESPACE02.WebPreferences writeSpacePermissions(proxyFilter, groupPermissions, globalPermissionDocument, false); writeSpacePermissions(proxyFilter, userPermissions, globalPermissionDocument, true); /* {{/velocity}} #set ($mydoc = $xwiki.getDocument("TESTARGESPACE02.WebPreferences")) #set ($RightsObjectAdmins = $mydoc.newObject("XWiki.XWikiGlobalRights")) ## #set ($result = $RightsObjectAdmins.set("groups", "XWiki.zbg_bedienstete")) #set ($result = $RightsObjectAdmins.set("levels", "view,comment,edit,delete")) #set ($result = $RightsObjectAdmins.set("users", "XWiki.baerthlein,XWiki.loc_hettegger_ib")) #set ($result = $RightsObjectAdmins.set("allow", 1)) $mydoc.save() {{/velocity}}*/ /* StringBuilder body = new StringBuilder(); for (Map.Entry> set : groupPermissions.entrySet()) { // always create a new Object so the existing (rights) Objects don't get overwritten body.append("#set ($RightsObjectAdmins = $mydoc.newObject(\"XWiki.XWikiGlobalRights\"))\n"); body.append("#set ($result = $RightsObjectAdmins.set(\"groups\", \"XWiki."+set.getKey()+"\"))\n"); String levels = String.join(",", set.getValue()); body.append("#set ($result = $RightsObjectAdmins.set(\"levels\", \""+levels+"\"))\n"); body.append("#set ($result = $RightsObjectAdmins.set(\"allow\", 1))\n"); body.append("$mydoc.save()\n"); } for (Map.Entry> set : userPermissions.entrySet()) { // always create a new Object so the existing (rights) Objects don't get overwritten body.append("#set ($RightsObjectAdmins = $mydoc.newObject(\"XWiki.XWikiGlobalRights\"))\n"); body.append("#set ($result = $RightsObjectAdmins.set(\"users\", \"XWiki."+set.getKey()+"\"))\n"); String levels = String.join(",", set.getValue()); body.append("#set ($result = $RightsObjectAdmins.set(\"levels\", \""+levels+"\"))\n"); body.append("#set ($result = $RightsObjectAdmins.set(\"allow\", 1))\n"); body.append("$mydoc.save()\n"); } // todo: spaceKey == xWiki - Webhome / name ? 
StringBuilder header = new StringBuilder("{{velocity}}\n"); header.append("#set ($mydoc = $xwiki.getDocument(\""+spaceKey+".WebPreferences\"))\n"); StringBuilder footer = new StringBuilder("{{/velocity}}\n"); String script = header.toString() + body.toString() + footer.toString(); String filename = this.environment.getTemporaryDirectory().toPath() + "/spacepermission-script-"+spaceKey+".vm"; FileOutputStream fileStream = null; try { fileStream = new FileOutputStream(filename); fileStream.write(script.getBytes()); fileStream.flush(); fileStream.close(); this.logger.info("Created spacepermissions-script: " + filename); } catch (FileNotFoundException e) { logger.error("File not found error writing spacepermission-script: " + filename, e); } catch (IOException e) { logger.error("IO-error writing spacepermission-script: " + filename, e); }*/ } /** * Checks if user or group has permissions and adds new permission to the user/group-key * @param permissions Collection of user or group-permissions * @param userOrgroup user or group-name * @param xWikiPermission permission to be added */ private static void addPermissionIfNotExists(Map> permissions, String userOrgroup, String xWikiPermission) { String prefixedUserOrGroup = XWiki.SYSTEM_SPACE + "." + userOrgroup; if (permissions.get(prefixedUserOrGroup) == null){ permissions.put(prefixedUserOrGroup, Arrays.asList(xWikiPermission)); } else { if (permissions.get(prefixedUserOrGroup).contains(xWikiPermission)) return; List newPermissions = new ArrayList<>(); newPermissions.addAll(permissions.get(prefixedUserOrGroup)); newPermissions.add(xWikiPermission); permissions.put(prefixedUserOrGroup, newPermissions); } } private static void writeSpacePermissions(ConfluenceFilter proxyFilter, Map> groupPermissions, String globalPermissionDocument, boolean isUserCollection) throws FilterException { for (Map.Entry> set : groupPermissions.entrySet()) { // Permission object FilterEventParameters objectProps = new FilterEventParameters(); objectProps.put(WikiObjectFilter.PARAMETER_CLASS_REFERENCE, XWiki.SYSTEM_SPACE + ".XWikiGlobalRights"); proxyFilter.beginWikiObject(globalPermissionDocument, objectProps); // permissions object property String levels = String.join(",", set.getValue()); proxyFilter.onWikiObjectProperty("levels", levels, new FilterEventParameters()); if (isUserCollection) proxyFilter.onWikiObjectProperty("users", set.getKey(), new FilterEventParameters()); else proxyFilter.onWikiObjectProperty("groups", set.getKey(), new FilterEventParameters()); proxyFilter.onWikiObjectProperty("allow", 1, new FilterEventParameters()); // persist wiki object proxyFilter.endWikiObject(globalPermissionDocument, objectProps); } } /** * Checks if the group is existing * @param groupname Groupname to check * @return true = group exists, false = group does not exist */ private boolean isExistingGroup(String groupname) { if (this.properties.isConvertToXWiki()) { if (groupname.equals("confluence-administrators")) { groupname = "XWikiAdminGroup"; } else if (groupname.equals("confluence-users")) { groupname = "XWikiAllGroup"; } } return documentAccessBridge.exists(XWiki.SYSTEM_SPACE + "." 
            + groupname);
    }

    /**
     * Extracts all groups and users from the Confluence space permissions and creates them if they do not exist yet.
     *
     * @param spaceId id of the space containing the permissions
     * @param spacePermissions collection of space permission ids
     * @param proxyFilter the filter doing the input/output conversion
     * @throws FilterException when reading the permission or user properties fails
     */
    private void sendSpacePermissionUsersAndGroups(long spaceId, Collection<Long> spacePermissions,
        ConfluenceFilter proxyFilter) throws FilterException
    {
        List<String> uniqueUsernames = new ArrayList<>();
        List<String> uniqueGroupnames = new ArrayList<>();

        for (Long spacePermissionId : spacePermissions) {
            this.progress.startStep(this);

            ConfluenceProperties spacePermissionProperties;
            try {
                spacePermissionProperties =
                    this.confluencePackage.getSpacePermissionProperties(spaceId, spacePermissionId);
            } catch (ConfigurationException e) {
                throw new FilterException("Failed to get space permission properties for space id: " + spaceId
                    + " and permission id: " + spacePermissionId, e);
            }

            // A permission always seems to be assigned to exactly one group or one user
            String groupName = spacePermissionProperties.getString(ConfluenceXMLPackage.SPACEPERMISSION_GROUP, "");
            String userSubject = spacePermissionProperties.getString(ConfluenceXMLPackage.SPACEPERMISSION_USER, "");
            String anonymous = spacePermissionProperties.getString(ConfluenceXMLPackage.SPACEPERMISSION_ANONYMOUS, "");

            if (StringUtils.isNoneEmpty(groupName)) {
                if (uniqueGroupnames.contains(groupName)) {
                    continue;
                }
                if (isExistingGroup(groupName)) {
                    uniqueGroupnames.add(groupName);
                    continue;
                }

                sendSpacePermissionGroups(proxyFilter, uniqueGroupnames, groupName);
                uniqueGroupnames.add(groupName);
                logger.info("Created group: " + groupName + " from space permissions.");
            } else if (StringUtils.isNoneEmpty(userSubject)) {
                try {
                    ConfluenceProperties userImplProperties = this.confluencePackage.getUserImplProperties(userSubject);
                    String lowerName = userImplProperties.getString(ConfluenceXMLPackage.USERIMPL_LOWERNAME, "");
                    String xWikiUsername = lowerName;
                    if (this.properties.isConvertToXWiki()) {
                        if (lowerName.equals("admin")) {
                            xWikiUsername = "Admin";
                        }
                    }

                    if (uniqueUsernames.contains(lowerName)) {
                        continue;
                    }
                    if (isExistingUser(xWikiUsername)) {
                        uniqueUsernames.add(lowerName);
                        continue;
                    }

                    sendSpacePermissionUsers(proxyFilter, userImplProperties, lowerName);
                    uniqueUsernames.add(lowerName);
                    logger.info("Created user: " + lowerName + " from space permissions.");
                } catch (ConfigurationException e) {
                    throw new FilterException("Failed to get UserImpl properties for user key: " + userSubject, e);
                }
            } else if (StringUtils.isNoneEmpty(anonymous)) {
                // Anonymous won't be migrated
                continue;
            }

            this.progress.endStep(this);
        }
    }

    /**
     * Creates the user if it does not exist yet.
     *
     * @param proxyFilter the filter doing the input/output conversion
     * @param userImplProperties the properties of the internal Confluence user (the user key is GDPR obfuscated)
     * @param lowerName lower case username
     * @throws FilterException conversion error
     */
    private void sendSpacePermissionUsers(ConfluenceFilter proxyFilter, ConfluenceProperties userImplProperties,
        String lowerName) throws FilterException
    {
        String email = userImplProperties.getString(ConfluenceXMLPackage.USERIMPL_EMAIL, "");

        FilterEventParameters userParameters = new FilterEventParameters();
        userParameters.put(UserFilter.PARAMETER_FIRSTNAME, lowerName);
        userParameters.put(UserFilter.PARAMETER_LASTNAME, lowerName);
        userParameters.put(UserFilter.PARAMETER_EMAIL, email);
        // all users should be activated by default
        userParameters.put(UserFilter.PARAMETER_ACTIVE, true);

        // TODO: no idea how to import/convert the password, probably salted with the Confluence instance id

        // > User
        proxyFilter.beginUser(lowerName, userParameters);

        // < User
        proxyFilter.endUser(lowerName, userParameters);
    }

    /**
     * Checks if the user exists.
     *
     * @param lowerName lower case user name
     * @return true if the user exists, false otherwise
     */
    private boolean isExistingUser(String lowerName)
    {
        // There is no more data available: first and last name are missing in the export
        UserReference reference = userReferenceResolver.resolve(XWiki.SYSTEM_SPACE + "." + lowerName);
        // skip user creation if the user already exists
        return userManager.exists(reference);
    }

    /**
     * Creates the group if it does not exist yet.
     *
     * @param proxyFilter the filter doing the input/output conversion
     * @param uniqueGroupnames collection of unique group names already extracted from the Confluence space permissions
     * @param groupname the current group name
     * @return true if the group already existed (or was skipped), false if it was created
     * @throws FilterException conversion error
     */
    private boolean sendSpacePermissionGroups(ConfluenceFilter proxyFilter, List<String> uniqueGroupnames,
        String groupname) throws FilterException
    {
        // Only create unique, not yet existing groups; check if the group document / page reference exists
        if (uniqueGroupnames.contains(groupname) || StringUtils.isEmpty(groupname)
            || documentAccessBridge.exists(XWiki.SYSTEM_SPACE + "." + groupname)) {
            this.progress.endStep(this);
            return true;
        }

        uniqueGroupnames.add(groupname);

        FilterEventParameters groupParameters = new FilterEventParameters();

        // > Group
        proxyFilter.beginGroupContainer(groupname, groupParameters);

        // < Group
        proxyFilter.endGroupContainer(groupname, groupParameters);

        return false;
    }

    private String convertConfluenceSpacepermission2xWikiPermission(String spacePermission)
    {
        // XWiki permissions:
        // view - ability to view pages
        // comment - ability to write page comments
        // edit - ability to edit page content
        // script - control programmatically exactly what users are allowed to do
        // delete - ability to delete pages
        // admin - ability to administer pages or the wiki
        //
        // Confluence space permissions:
        // VIEWSPACE, REMOVEOWNCONTENT, EDITSPACE, REMOVEPAGE, EDITBLOG, REMOVEBLOG, CREATEATTACHMENT,
        // REMOVEATTACHMENT, COMMENT, REMOVECOMMENT, SETPAGEPERMISSIONS, REMOVEMAIL, EXPORTSPACE, SETSPACEPERMISSIONS
        String xWikiPermission = "";
        if (spacePermission.equals("VIEWSPACE")) {
            xWikiPermission = "view";
        } else if (spacePermission.equals("EDITSPACE") || spacePermission.equals("EDITBLOG")) {
            xWikiPermission = "edit";
        } else if (spacePermission.equals("COMMENT")) {
            xWikiPermission = "comment";
        } else if (spacePermission.equals("SETSPACEPERMISSIONS")) {
            xWikiPermission = "admin";
        }

        return xWikiPermission;
    }

    private String convertConfluencePageRestrictions2xWikiPermission(String pageRestriction)
    {
        // XWiki permissions: see convertConfluenceSpacepermission2xWikiPermission()
        // Confluence page restrictions: View, Edit
        String xWikiPermission = "";
        if (pageRestriction.equals("View")) {
            xWikiPermission = "view";
        } else if (pageRestriction.equals("Edit")) {
            xWikiPermission = "view,edit,comment,delete";
        }

        return xWikiPermission;
    }

    private void sendUsers(Collection<Long> users, Collection<Long> groups, ConfluenceFilter proxyFilter)
        throws FilterException
    {
        // Switch the wiki if a specific one is forced
        if
(this.properties.getUsersWiki() != null) { proxyFilter.beginWiki(this.properties.getUsersWiki(), FilterEventParameters.EMPTY); } // Generate users events for (Long userId : users) { this.progress.startStep(this); ConfluenceProperties userProperties; try { userProperties = this.confluencePackage.getInternalUserProperties(userId); } catch (ConfigurationException e) { throw new FilterException("Failed to get user properties", e); } String userName = toUserReferenceName( userProperties.getString(ConfluenceXMLPackage.KEY_USER_NAME, String.valueOf(userId))); FilterEventParameters userParameters = new FilterEventParameters(); userParameters.put(UserFilter.PARAMETER_FIRSTNAME, userProperties.getString(ConfluenceXMLPackage.KEY_USER_FIRSTNAME)); userParameters.put(UserFilter.PARAMETER_LASTNAME, userProperties.getString(ConfluenceXMLPackage.KEY_USER_LASTNAME)); userParameters.put(UserFilter.PARAMETER_EMAIL, userProperties.getString(ConfluenceXMLPackage.KEY_USER_EMAIL)); userParameters.put(UserFilter.PARAMETER_ACTIVE, userProperties.getBoolean(ConfluenceXMLPackage.KEY_USER_ACTIVE, true)); try { userParameters.put(UserFilter.PARAMETER_REVISION_DATE, this.confluencePackage.getDate(userProperties, ConfluenceXMLPackage.KEY_USER_REVISION_DATE)); userParameters.put(UserFilter.PARAMETER_CREATION_DATE, this.confluencePackage.getDate(userProperties, ConfluenceXMLPackage.KEY_USER_CREATION_DATE)); } catch (Exception e) { if (this.properties.isVerbose()) { this.logger.error("Failed to parse the user date", e); } } // TODO: no idea how to import/convert the password, probably salted with the Confluence instance id // > User proxyFilter.beginUser(userName, userParameters); // < User proxyFilter.endUser(userName, userParameters); this.progress.endStep(this); } // Generate groups events for (long groupInt : groups) { this.progress.startStep(this); ConfluenceProperties groupProperties; try { groupProperties = this.confluencePackage.getGroupProperties(groupInt); } catch (ConfigurationException e) { throw new FilterException("Failed to get group properties", e); } String groupName = groupProperties.getString(ConfluenceXMLPackage.KEY_GROUP_NAME, String.valueOf(groupInt)); if (this.properties.isConvertToXWiki()) { if (groupName.equals("confluence-administrators")) { groupName = "XWikiAdminGroup"; } else if (groupName.equals("confluence-users")) { groupName = "XWikiAllGroup"; } } FilterEventParameters groupParameters = new FilterEventParameters(); try { groupParameters.put(GroupFilter.PARAMETER_REVISION_DATE, this.confluencePackage.getDate(groupProperties, ConfluenceXMLPackage.KEY_GROUP_REVISION_DATE)); groupParameters.put(GroupFilter.PARAMETER_CREATION_DATE, this.confluencePackage.getDate(groupProperties, ConfluenceXMLPackage.KEY_GROUP_CREATION_DATE)); } catch (Exception e) { if (this.properties.isVerbose()) { this.logger.error("Failed to parse the group date", e); } } // > Group proxyFilter.beginGroupContainer(groupName, groupParameters); // Members users if (groupProperties.containsKey(ConfluenceXMLPackage.KEY_GROUP_MEMBERUSERS)) { List groupMembers = this.confluencePackage.getLongList(groupProperties, ConfluenceXMLPackage.KEY_GROUP_MEMBERUSERS); for (Long memberInt : groupMembers) { FilterEventParameters memberParameters = new FilterEventParameters(); try { String memberId = this.confluencePackage.getInternalUserProperties(memberInt) .getString(ConfluenceXMLPackage.KEY_USER_NAME, String.valueOf(memberInt)); if (this.properties.isConvertToXWiki() && memberId.equals("admin")) { memberId = "Admin"; } 
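                        // Emit the (possibly converted) user name as a member of the group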
proxyFilter.onGroupMemberGroup(memberId, memberParameters); } catch (Exception e) { this.logger.error("Failed to get user properties", e); } } } // Members groups if (groupProperties.containsKey(ConfluenceXMLPackage.KEY_GROUP_MEMBERGROUPS)) { List groupMembers = this.confluencePackage.getLongList(groupProperties, ConfluenceXMLPackage.KEY_GROUP_MEMBERGROUPS); for (Long memberInt : groupMembers) { FilterEventParameters memberParameters = new FilterEventParameters(); try { String memberId = this.confluencePackage.getGroupProperties(memberInt) .getString(ConfluenceXMLPackage.KEY_GROUP_NAME, String.valueOf(memberInt)); if (this.properties.isConvertToXWiki()) { if (memberId.equals("confluence-administrators")) { memberId = "XWikiAdminGroup"; } else if (memberId.equals("confluence-users")) { memberId = "XWikiAllGroup"; } } proxyFilter.onGroupMemberGroup(memberId, memberParameters); } catch (Exception e) { this.logger.error("Failed to get group properties", e); } } } // < Group proxyFilter.endGroupContainer(groupName, groupParameters); this.progress.endStep(this); } // Get back to default wiki if (this.properties.getUsersWiki() != null) { proxyFilter.endWiki(this.properties.getUsersWiki(), FilterEventParameters.EMPTY); } } private void readPage(long pageId, String spaceKey, Object filter, ConfluenceFilter proxyFilter, StringBuilder script) throws FilterException { ConfluenceProperties pageProperties = getPageProperties(pageId); if (pageProperties == null) { this.logger.warn("Can't find page with id [{}]", pageId); return; } String documentName; if (pageProperties.containsKey(ConfluenceXMLPackage.KEY_PAGE_HOMEPAGE)) { documentName = this.properties.getSpacePageName(); } else { documentName = pageProperties.getString(ConfluenceXMLPackage.KEY_PAGE_TITLE); } // Skip pages with empty title if (StringUtils.isEmpty(documentName)) { this.logger.warn("Found a page without a name or title (id={}). 
Skipping it.", pageId); return; } // Skip deleted, archived or draft pages String contentStatus = pageProperties.getString(ConfluenceXMLPackage.KEY_PAGE_CONTENT_STATUS); if (contentStatus != null && (contentStatus.equals("deleted") || contentStatus.equals("archived") || contentStatus.equals("draft"))) return; FilterEventParameters documentParameters = new FilterEventParameters(); if (this.properties.getDefaultLocale() != null) { documentParameters.put(WikiDocumentFilter.PARAMETER_LOCALE, this.properties.getDefaultLocale()); } // Apply the standard entity name validator documentName = toEntityName(documentName); // > WikiDocument proxyFilter.beginWikiDocument(documentName, documentParameters); Locale locale = Locale.ROOT; FilterEventParameters documentLocaleParameters = new FilterEventParameters(); if (pageProperties.containsKey(ConfluenceXMLPackage.KEY_PAGE_CREATION_AUTHOR)) { documentLocaleParameters.put(WikiDocumentFilter.PARAMETER_CREATION_AUTHOR, toUserReference(pageProperties.getString(ConfluenceXMLPackage.KEY_PAGE_CREATION_AUTHOR))); } else if (pageProperties.containsKey(ConfluenceXMLPackage.KEY_PAGE_CREATION_AUTHOR_KEY)) { String authorKey = pageProperties.getString(ConfluenceXMLPackage.KEY_PAGE_CREATION_AUTHOR_KEY); String authorName = toUserReference(resolveUserName(authorKey, authorKey)); documentLocaleParameters.put(WikiDocumentFilter.PARAMETER_CREATION_AUTHOR, authorName); } if (pageProperties.containsKey(ConfluenceXMLPackage.KEY_PAGE_CREATION_DATE)) { try { documentLocaleParameters.put(WikiDocumentFilter.PARAMETER_CREATION_DATE, this.confluencePackage.getDate(pageProperties, ConfluenceXMLPackage.KEY_PAGE_CREATION_DATE)); } catch (Exception e) { if (this.properties.isVerbose()) { this.logger.error("Failed to parse creation date", e); } } } if (pageProperties.containsKey(ConfluenceXMLPackage.KEY_PAGE_REVISION)) { documentLocaleParameters.put(WikiDocumentFilter.PARAMETER_LASTREVISION, pageProperties.getString(ConfluenceXMLPackage.KEY_PAGE_REVISION)); } // > WikiDocumentLocale proxyFilter.beginWikiDocumentLocale(locale, documentLocaleParameters); // Revisions if (pageProperties.containsKey(ConfluenceXMLPackage.KEY_PAGE_REVISIONS)) { List revisions = this.confluencePackage.getLongList(pageProperties, ConfluenceXMLPackage.KEY_PAGE_REVISIONS); for (Long revisionId : revisions) { readPageRevision(revisionId, spaceKey, filter, proxyFilter, script); } } // Current version readPageRevision(pageId, spaceKey, filter, proxyFilter, script); // < WikiDocumentLocale proxyFilter.endWikiDocumentLocale(locale, documentLocaleParameters); // < WikiDocument proxyFilter.endWikiDocument(documentName, documentParameters); } String resolveUserName(String key, String def) { try { ConfluenceProperties userProperties = this.confluencePackage.getUserProperties(key); if (userProperties != null) { String userName = userProperties.getString(ConfluenceXMLPackage.KEY_USER_NAME); if (userName != null) { return userName; } } } catch (ConfigurationException e) { this.logger.warn("Failed to retrieve properties of user with key [{}]: {}", key, ExceptionUtils.getRootCauseMessage(e)); } return def; } String toMappedUser(String confluenceUser) { if (this.properties.getUserIdMapping() != null) { String mappedName = this.properties.getUserIdMapping().get(confluenceUser); if (mappedName != null) { mappedName = mappedName.trim(); if (!mappedName.isEmpty()) { return mappedName; } } } return confluenceUser; } String toUserReferenceName(String userName) { if (userName == null || !this.properties.isConvertToXWiki()) { // Apply 
the configured mapping return toMappedUser(userName); } // Translate the usual default admin user in Confluence to it's XWiki counterpart if (userName.equals("admin")) { return "Admin"; } // Apply the configured mapping userName = toMappedUser(userName); // Protected from characters not well supported in user page name depending on the version of XWiki userName = FORBIDDEN_USER_CHARACTERS.matcher(userName).replaceAll("_"); return userName; } String toUserReference(String userName) { if (userName == null || !this.properties.isConvertToXWiki()) { return userName; } // Transform user name according to configuration userName = toUserReferenceName(userName); // Add the "XWiki" space and the wiki if configured. Ideally this should probably be done on XWiki Instance // Output filter side EntityReference reference; if (this.properties.getUsersWiki() != null) { reference = new DocumentReference(this.properties.getUsersWiki(), "XWiki", userName); } else { reference = new LocalDocumentReference("XWiki", userName); } return this.serializer.serialize(reference); } private ConfluenceProperties getPageProperties(Long pageId) throws FilterException { try { return this.confluencePackage.getPageProperties(pageId, false); } catch (ConfigurationException e) { throw new FilterException("Failed to get page properties", e); } } private void readPageRevision(Long pageId, String spaceKey, Object filter, ConfluenceFilter proxyFilter, StringBuilder script) throws FilterException { ConfluenceProperties pageProperties = getPageProperties(pageId); if (pageProperties == null) { this.logger.warn("Can't find page revision with id [{}]", pageId); return; } readPageRevision(pageId, spaceKey, pageProperties, filter, proxyFilter, script); } private void readPageRevision(long pageId, String spaceKey, ConfluenceProperties pageProperties, Object filter, ConfluenceFilter proxyFilter, StringBuilder script) throws FilterException { String revision = pageProperties.getString(ConfluenceXMLPackage.KEY_PAGE_REVISION); FilterEventParameters documentRevisionParameters = new FilterEventParameters(); if (pageProperties.containsKey(ConfluenceXMLPackage.KEY_PAGE_PARENT)) { try { documentRevisionParameters.put(WikiDocumentFilter.PARAMETER_PARENT, getReferenceFromId(pageProperties, ConfluenceXMLPackage.KEY_PAGE_PARENT)); } catch (Exception e) { if (this.properties.isVerbose()) { this.logger.error("Failed to parse parent", e); } } } if (pageProperties.containsKey(ConfluenceXMLPackage.KEY_PAGE_REVISION_AUTHOR)) { documentRevisionParameters.put(WikiDocumentFilter.PARAMETER_REVISION_AUTHOR, toUserReference(pageProperties.getString(ConfluenceXMLPackage.KEY_PAGE_REVISION_AUTHOR))); } else if (pageProperties.containsKey(ConfluenceXMLPackage.KEY_PAGE_REVISION_AUTHOR_KEY)) { String authorKey = pageProperties.getString(ConfluenceXMLPackage.KEY_PAGE_REVISION_AUTHOR_KEY); String authorName = toUserReference(resolveUserName(authorKey, authorKey)); documentRevisionParameters.put(WikiDocumentFilter.PARAMETER_REVISION_AUTHOR, authorName); } if (pageProperties.containsKey(ConfluenceXMLPackage.KEY_PAGE_REVISION_DATE)) { try { documentRevisionParameters.put(WikiDocumentFilter.PARAMETER_REVISION_DATE, this.confluencePackage.getDate(pageProperties, ConfluenceXMLPackage.KEY_PAGE_REVISION_DATE)); } catch (Exception e) { if (this.properties.isVerbose()) { this.logger.error("Failed to parse date", e); } } } if (pageProperties.containsKey(ConfluenceXMLPackage.KEY_PAGE_REVISION_COMMENT)) { documentRevisionParameters.put(WikiDocumentFilter.PARAMETER_REVISION_COMMENT, 
pageProperties.getString(ConfluenceXMLPackage.KEY_PAGE_REVISION_COMMENT)); } documentRevisionParameters.put(WikiDocumentFilter.PARAMETER_TITLE, pageProperties.getString(ConfluenceXMLPackage.KEY_PAGE_TITLE)); String bodyContent = null; Syntax bodySyntax = null; int bodyType = -1; if (pageProperties.containsKey(ConfluenceXMLPackage.KEY_PAGE_BODY)) { bodyContent = pageProperties.getString(ConfluenceXMLPackage.KEY_PAGE_BODY); bodyType = pageProperties.getInt(ConfluenceXMLPackage.KEY_PAGE_BODY_TYPE, -1); switch (bodyType) { // Not bodyType means old Confluence syntax case -1: bodyType = 0; case 0: bodySyntax = ConfluenceParser.SYNTAX; break; case 2: bodySyntax = Syntax.CONFLUENCEXHTML_1_0; break; default: if (this.properties.isVerbose()) { this.logger.error("Unknown body type [{}]", bodyType); } break; } } // Content if (bodyContent != null) { if (this.properties.isContentEvents() && filter instanceof Listener) { // > WikiDocumentRevision proxyFilter.beginWikiDocumentRevision(revision, documentRevisionParameters); try { parse(bodyContent, bodyType, this.properties.getMacroContentSyntax(), proxyFilter); } catch (Exception e) { this.logger.error("Failed to parse content of page with id [{}]", pageId, e); } } else if (this.properties.isConvertToXWiki()) { // Convert content to XWiki syntax try { documentRevisionParameters.put(WikiDocumentFilter.PARAMETER_CONTENT, convertToXWiki21(bodyContent, bodyType)); documentRevisionParameters.put(WikiDocumentFilter.PARAMETER_SYNTAX, Syntax.XWIKI_2_1); } catch (Exception e) { this.logger.error("Failed to convert content of the page with id [{}]", pageId, e); } // > WikiDocumentRevision proxyFilter.beginWikiDocumentRevision(revision, documentRevisionParameters); } else { // Keep Confluence syntax documentRevisionParameters.put(WikiDocumentFilter.PARAMETER_CONTENT, bodyContent); documentRevisionParameters.put(WikiDocumentFilter.PARAMETER_SYNTAX, bodySyntax); // > WikiDocumentRevision proxyFilter.beginWikiDocumentRevision(revision, documentRevisionParameters); } } else { // > WikiDocumentRevision proxyFilter.beginWikiDocumentRevision(revision, documentRevisionParameters); } // Attachments Map pageAttachments = new LinkedHashMap<>(); for (long attachmentId : this.confluencePackage.getAttachments(pageId)) { ConfluenceProperties attachmentProperties; try { attachmentProperties = this.confluencePackage.getAttachmentProperties(pageId, attachmentId); } catch (ConfigurationException e) { throw new FilterException("Failed to get attachment properties", e); } String attachmentName = this.confluencePackage.getAttachmentName(attachmentProperties); ConfluenceProperties currentAttachmentProperties = pageAttachments.get(attachmentName); if (currentAttachmentProperties != null) { try { Date date = this.confluencePackage.getDate(attachmentProperties, ConfluenceXMLPackage.KEY_ATTACHMENT_REVISION_DATE); Date currentDate = this.confluencePackage.getDate(currentAttachmentProperties, ConfluenceXMLPackage.KEY_ATTACHMENT_REVISION_DATE); if (date.after(currentDate)) { pageAttachments.put(attachmentName, attachmentProperties); } } catch (Exception e) { this.logger.warn("Failed to parse the date of attachment with id [{}], skipping it", attachmentId, e); } } else { pageAttachments.put(attachmentName, attachmentProperties); } } for (ConfluenceProperties attachmentProperties : pageAttachments.values()) { readAttachment(pageId, attachmentProperties, filter, proxyFilter); } // Tags Map pageTags = new LinkedHashMap<>(); for (Object tagIdStringObject : 
pageProperties.getList(ConfluenceXMLPackage.KEY_PAGE_LABELLINGS)) { long tagId = Long.parseLong((String) tagIdStringObject); ConfluenceProperties tagProperties; try { tagProperties = this.confluencePackage.getObjectProperties(tagId); } catch (ConfigurationException e) { throw new FilterException("Failed to get tag properties", e); } String tagName = this.confluencePackage.getTagName(tagProperties); pageTags.put(tagName, tagProperties); } if (!pageTags.isEmpty()) { readPageTags(pageProperties, proxyFilter, pageTags); } // Comments Map pageComments = new LinkedHashMap<>(); Map commentIndeces = new LinkedHashMap<>(); int commentIndex = 0; for (Object commentIdStringObject : pageProperties.getList(ConfluenceXMLPackage.KEY_PAGE_COMMENTS)) { long commentId = Long.parseLong((String) commentIdStringObject); ConfluenceProperties commentProperties; try { commentProperties = this.confluencePackage.getObjectProperties(commentId); } catch (ConfigurationException e) { throw new FilterException("Failed to get comment properties", e); } pageComments.put(commentId, commentProperties); commentIndeces.put(commentId, commentIndex); commentIndex++; } for (Long commentId : pageComments.keySet()) { readPageComment(pageProperties, proxyFilter, commentId, pageComments, commentIndeces); } if (this.properties.isStoreConfluenceDetailsEnabled()) { storeConfluenceDetails(pageId, spaceKey, pageProperties, proxyFilter); } // confluence pagerestrictions readPageRestrictions(pageProperties, proxyFilter, script); // < WikiDocumentRevision proxyFilter.endWikiDocumentRevision(revision, documentRevisionParameters); } /** * Reads the confluence pagepermissions and assignes "view" or "edit" (= view, edit, comment, delete) to users and * groups like set in the space-export (if users or groups do not exist, they won't be created - xWiki permissions * will work as soon as the users and groups have been created). * The xWiki-Permissions are set via the document's XWiki.XWikiRights class and are visible in the edit object mode, * they won't shine up in the page-permissions for some reasons (perhaps index problem)? * * @param pageProperties Currents Pageproperties * @param proxyFilter valid confluence filter * @param script the internally created Velocity script * @throws FilterException Error during reading or setting pageproperties / document */ private void readPageRestrictions(ConfluenceProperties pageProperties, ConfluenceFilter proxyFilter, StringBuilder script) throws FilterException { String objectName = getObjectName(pageProperties); // eg. 
        // e.g. ITInfo.WebHome
        Map<String, String> userPermissions = new HashMap<>();
        Map<String, String> groupPermissions = new HashMap<>();

        // Confluence content permission sets - there should be at most 2 of them (one "View" and one "Edit" set)
        for (Object permissionSetIdStringObject : pageProperties.getList(ConfluenceXMLPackage.CONTENT_PERMISSION_SETS)) {
            long contentPermissionSetId = Long.parseLong((String) permissionSetIdStringObject);

            ConfluenceProperties contentPermissionSetProps;
            try {
                contentPermissionSetProps = this.confluencePackage.getObjectProperties(contentPermissionSetId);
            } catch (ConfigurationException e) {
                throw new FilterException("Failed to get content permission set properties for: "
                    + contentPermissionSetId, e);
            }

            // Every view or edit set stores the permissions assigned to users and groups;
            // each permission is assigned to exactly one user or one group
            final List<Object> contentPermissions =
                contentPermissionSetProps.getList(ConfluenceXMLPackage.CONTENT_PERMISSIONS);
            for (Object permissionIdStringObject : contentPermissions) {
                long permissionId = Long.parseLong((String) permissionIdStringObject);
                try {
                    final ConfluenceProperties contentPermissionProperty =
                        this.confluencePackage.getObjectProperties(permissionId);
                    final String userSubject = contentPermissionProperty.getString("userSubject");
                    String username = "";
                    if (StringUtils.isNoneEmpty(userSubject)) {
                        final ConfluenceProperties userImplProperties;
                        // look up ConfluenceUserImpl in order to get the username etc.
                        try {
                            userImplProperties = this.confluencePackage.getUserImplProperties(userSubject);
                        } catch (ConfigurationException e) {
                            throw new FilterException("Failed to get UserImpl properties for user key: " + userSubject,
                                e);
                        }
                        username = userImplProperties.getString(ConfluenceXMLPackage.USERIMPL_LOWERNAME, "");
                        // admin could probably be skipped since an admin can't really be restricted
                        if (this.properties.isConvertToXWiki()) {
                            if (username.equals("admin")) {
                                username = "Admin";
                            }
                        }
                    }
                    final String groupName = contentPermissionProperty.getString("groupName");
                    final String type = contentPermissionProperty.getString("type"); // View, Edit etc.

                    // Confluence stores view and edit permissions separately, so a user can have both a view and an
                    // edit permission. Since an edit permission implies the view permission, only one of them is
                    // kept: an existing view permission is replaced when an edit permission exists for the same user
                    // or group.
                    replaceViewByEditPermission(userPermissions, username, type);
                    replaceViewByEditPermission(groupPermissions, groupName, type);
                } catch (ConfigurationException e) {
                    logger.error("Failed to get the content permissions for: " + permissionId, e);
                }
            }
        }

        // Persist the collected data into XWiki.XWikiRights objects
        StringBuilder body = new StringBuilder();
        writePageRestrictions(proxyFilter, objectName, userPermissions, true, body);
        writePageRestrictions(proxyFilter, objectName, groupPermissions, false, body);
        if (body.length() > 0) {
            // document reference, e.g. TESTARGESPACE02.Testseite 3
            script.append("#set ($mydoc = $xwiki.getDocument(\"").append(objectName).append("\"))\n").append(body);
        }
    }

    /**
     * Writes the collected page restrictions as XWiki.XWikiRights objects and appends the corresponding Velocity
     * statements to the script body.
     *
     * @param proxyFilter the filter doing the input/output conversion
     * @param objectName the document the rights objects are attached to
     * @param permissions key = user or group name, value = permission type ("View" or "Edit")
     * @param isUserCollection true if the permissions are user permissions, false for group permissions
     * @param body the Velocity script body to append to
     * @throws FilterException when sending the rights objects fails
     */
    private void writePageRestrictions(ConfluenceFilter proxyFilter, String objectName,
        Map<String, String> permissions, boolean isUserCollection, StringBuilder body) throws FilterException
    {
        for (Map.Entry<String, String> entry : permissions.entrySet()) {
            // Permission object
            FilterEventParameters restrictionsParameters = new FilterEventParameters();
            restrictionsParameters.put(WikiObjectFilter.PARAMETER_CLASS_REFERENCE, XWiki.SYSTEM_SPACE + ".XWikiRights");

            proxyFilter.beginWikiObject(objectName, restrictionsParameters);

            // permissions object properties
            String levels = this.convertConfluencePageRestrictions2xWikiPermission(entry.getValue());
            proxyFilter.onWikiObjectProperty("levels", levels, new FilterEventParameters());
            if (isUserCollection) {
                proxyFilter.onWikiObjectProperty("users", XWiki.SYSTEM_SPACE + "." + entry.getKey(),
                    new FilterEventParameters());
            } else {
                proxyFilter.onWikiObjectProperty("groups", XWiki.SYSTEM_SPACE + "." + entry.getKey(),
                    new FilterEventParameters());
            }
            proxyFilter.onWikiObjectProperty("allow", 1, new FilterEventParameters());

            // persist the wiki object
            proxyFilter.endWikiObject(objectName, restrictionsParameters);

            // Also generate a Velocity script, since the page restrictions currently get overwritten by the
            // "Nested Page Migrator" plugin.
            // Always create a new object so the existing (rights) objects don't get overwritten.
            body.append("#set ($rightsObject = $mydoc.newObject(\"" + XWiki.SYSTEM_SPACE + ".XWikiRights" + "\"))\n");
            if (isUserCollection) {
                body.append("#set ($result = $rightsObject.set(\"users\", \"" + XWiki.SYSTEM_SPACE + "."
                    + entry.getKey() + "\"))\n");
            } else {
                body.append("#set ($result = $rightsObject.set(\"groups\", \"" + XWiki.SYSTEM_SPACE + "."
                    + entry.getKey() + "\"))\n");
            }
            body.append("#set ($result = $rightsObject.set(\"levels\", \"" + levels + "\"))\n");
            body.append("#set ($result = $rightsObject.set(\"allow\", 1))\n");
            body.append("$mydoc.save()\n");
        }
    }

    /**
     * If a user or a group has the Edit permission, it replaces the View permission.
     *
     * @param permissions collection of user or group permissions (key = user or group name, value = permission type)
     * @param userOrGroup name of the user or the group
     * @param permissionType Edit or View
     */
    private void replaceViewByEditPermission(Map<String, String> permissions, String userOrGroup,
        String permissionType)
    {
        if (StringUtils.isNoneEmpty(userOrGroup)) {
            String value = permissions.get(userOrGroup);
            if (value == null) {
                permissions.put(userOrGroup, permissionType);
            } else if (value.equals("View") && permissionType.equals("Edit")) {
                permissions.replace(userOrGroup, permissionType);
            }
        }
    }

    /**
     * @param currentProperties the properties where to find the page identifier
     * @param key the key to find the page identifier
     * @return the reference of the page
     * @throws ConfigurationException when failing to get page properties
     * @throws FilterException when failing to create the reference
     */
    public EntityReference getReferenceFromId(ConfluenceProperties currentProperties, String key)
        throws ConfigurationException, FilterException
    {
        Long pageId = currentProperties.getLong(key, null);
        if (pageId != null) {
            ConfluenceProperties pageProperties = this.confluencePackage.getPageProperties(pageId, true);

            long spaceId = pageProperties.getLong(ConfluenceXMLPackage.KEY_PAGE_SPACE);
            String pageTitle = pageProperties.getString(ConfluenceXMLPackage.KEY_PAGE_TITLE);

            if (StringUtils.isNotEmpty(pageTitle)) {
                long currentSpaceId = currentProperties.getLong(ConfluenceXMLPackage.KEY_PAGE_SPACE);

                EntityReference spaceReference = null;
                if (spaceId != currentSpaceId) {
                    String spaceName =
this.confluencePackage.getSpaceKey(spaceId); if (spaceName != null) { spaceReference = new EntityReference(toEntityName(spaceName), EntityType.SPACE); } } return new EntityReference(toEntityName(pageTitle), EntityType.DOCUMENT, spaceReference); } else { throw new FilterException("Cannot create a reference to the page with id [" + pageId + "] because it does not have any title"); } } return null; } /** * @param name the name to validate * @return the validated name * @since 9.16.1 */ public String toEntityName(String name) { if (this.properties.isConvertToXWiki() && this.properties.isEntityNameValidation()) { return this.converter.convert(name); } return name; } /** * @since 9.13 */ private void storeConfluenceDetails(long pageId, String spaceKey, ConfluenceProperties pageProperties, ConfluenceFilter proxyFilter) throws FilterException { FilterEventParameters pageReportParameters = new FilterEventParameters(); String objectName = getObjectName(pageProperties); // Page report object pageReportParameters.put(WikiObjectFilter.PARAMETER_NUMBER, 0); pageReportParameters.put(WikiObjectFilter.PARAMETER_CLASS_REFERENCE, "Confluence.Code.ConfluencePageClass"); proxyFilter.beginWikiObject(objectName, pageReportParameters); StringBuilder pageURLBuilder = new StringBuilder(); if (this.properties.getBaseURLs() != null) { pageURLBuilder.append(this.properties.getBaseURLs().get(0).toString()); pageURLBuilder.append("/wiki/spaces/").append(spaceKey); if (!pageProperties.containsKey(ConfluenceXMLPackage.KEY_PAGE_HOMEPAGE)) { String pageName = pageProperties.getString(ConfluenceXMLPackage.KEY_PAGE_TITLE); pageURLBuilder.append("/pages/").append(pageId).append("/").append(pageName); } } proxyFilter.onWikiObjectProperty("id", pageId, new FilterEventParameters()); proxyFilter.onWikiObjectProperty("url", pageURLBuilder.toString(), new FilterEventParameters()); proxyFilter.onWikiObjectProperty("space", spaceKey, new FilterEventParameters()); proxyFilter.endWikiObject(objectName, pageReportParameters); } private String convertToXWiki21(String bodyContent, int bodyType) throws FilterException, ParseException { DefaultWikiPrinter printer = new DefaultWikiPrinter(); PrintRenderer renderer = this.xwiki21Factory.createRenderer(printer); parse(bodyContent, bodyType, Syntax.XWIKI_2_1, renderer); return printer.toString(); } private ConfluenceConverterListener createConverter(Listener listener) { ConfluenceConverterListener converterListener = this.converterProvider.get(); converterListener.initialize(this.confluencePackage, this, this.properties); converterListener.setWrappedListener(listener); return converterListener; } private Listener wrap(Listener listener) { if (this.properties.isConvertToXWiki()) { return createConverter(listener); } return listener; } private void parse(String bodyContent, int bodyType, Syntax macroContentSyntax, Listener listener) throws FilterException, ParseException { switch (bodyType) { case 0: this.confluenceWIKIParser.parse(new StringReader(bodyContent), wrap(listener)); break; case 2: createSyntaxFilter(bodyContent, macroContentSyntax).read(listener); break; default: break; } } private BeanInputFilterStream createSyntaxFilter(String bodyContent, Syntax macroContentSyntax) throws FilterException { InternalConfluenceXHTMLInputProperties filterProperties = new InternalConfluenceXHTMLInputProperties(); filterProperties.setSource(new StringInputSource(bodyContent)); filterProperties.setMacroContentSyntax(macroContentSyntax); if (this.properties.isConvertToXWiki()) { 
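            // When converting to XWiki, plug the Confluence converter listener into the XHTML input filter so the
            // content is converted while it is parsed.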
filterProperties.setConverter(createConverter(null)); } BeanInputFilterStreamFactory syntaxFilterFactory = ((BeanInputFilterStreamFactory) this.confluenceXHTMLParserFactory); return syntaxFilterFactory.createInputFilterStream(filterProperties); } private void readAttachment(long pageId, ConfluenceProperties attachmentProperties, Object filter, ConfluenceFilter proxyFilter) throws FilterException { String contentStatus = attachmentProperties.getString(ConfluenceXMLPackage.KEY_ATTACHMENT_CONTENTSTATUS, null); if (StringUtils.equals(contentStatus, "deleted")) { // The actual deleted attachment is not in the exported package so we can't really do anything with it return; } long attachmentId = attachmentProperties.getLong("id"); String attachmentName = this.confluencePackage.getAttachmentName(attachmentProperties); long attachmentSize; String mediaType = null; if (attachmentProperties.containsKey(ConfluenceXMLPackage.KEY_ATTACHMENT_CONTENTPROPERTIES)) { ConfluenceProperties attachmentContentProperties = getContentProperties(attachmentProperties, ConfluenceXMLPackage.KEY_ATTACHMENT_CONTENTPROPERTIES); attachmentSize = attachmentContentProperties.getLong(ConfluenceXMLPackage.KEY_ATTACHMENT_CONTENT_FILESIZE, -1); if (attachmentProperties.containsKey(ConfluenceXMLPackage.KEY_ATTACHMENT_CONTENTTYPE)) { mediaType = attachmentContentProperties.getString(ConfluenceXMLPackage.KEY_ATTACHMENT_CONTENT_MEDIA_TYPE); } } else { attachmentSize = attachmentProperties.getLong(ConfluenceXMLPackage.KEY_ATTACHMENT_CONTENT_SIZE, -1); if (attachmentProperties.containsKey(ConfluenceXMLPackage.KEY_ATTACHMENT_CONTENTTYPE)) { mediaType = attachmentProperties.getString(ConfluenceXMLPackage.KEY_ATTACHMENT_CONTENTTYPE); } } Long version = this.confluencePackage.getAttachementVersion(attachmentProperties); long originalRevisionId = this.confluencePackage.getAttachmentOriginalVersionId(attachmentProperties, attachmentId); File contentFile; try { contentFile = this.confluencePackage.getAttachmentFile(pageId, originalRevisionId, version); } catch (Exception e) { this.logger.warn("Failed to find file corresponding to version [{}] attachment [{}] in page [{}]: {}", version, attachmentName, pageId, ExceptionUtils.getRootCauseMessage(e)); return; } FilterEventParameters attachmentParameters = new FilterEventParameters(); if (mediaType != null) { attachmentParameters.put(WikiAttachmentFilter.PARAMETER_CONTENT_TYPE, mediaType); } if (attachmentProperties.containsKey(ConfluenceXMLPackage.KEY_ATTACHMENT_CREATION_AUTHOR)) { attachmentParameters.put(WikiAttachmentFilter.PARAMETER_CREATION_AUTHOR, attachmentProperties.getString(ConfluenceXMLPackage.KEY_ATTACHMENT_CREATION_AUTHOR)); } if (attachmentProperties.containsKey(ConfluenceXMLPackage.KEY_ATTACHMENT_CREATION_DATE)) { try { attachmentParameters.put(WikiAttachmentFilter.PARAMETER_CREATION_DATE, this.confluencePackage .getDate(attachmentProperties, ConfluenceXMLPackage.KEY_ATTACHMENT_CREATION_DATE)); } catch (Exception e) { if (this.properties.isVerbose()) { this.logger.error("Failed to parse date", e); } } } attachmentParameters.put(WikiAttachmentFilter.PARAMETER_REVISION, String.valueOf(version)); if (attachmentProperties.containsKey(ConfluenceXMLPackage.KEY_ATTACHMENT_REVISION_AUTHOR)) { attachmentParameters.put(WikiAttachmentFilter.PARAMETER_REVISION_AUTHOR, attachmentProperties.getString(ConfluenceXMLPackage.KEY_ATTACHMENT_REVISION_AUTHOR)); } if (attachmentProperties.containsKey(ConfluenceXMLPackage.KEY_ATTACHMENT_REVISION_DATE)) { try { 
attachmentParameters.put(WikiAttachmentFilter.PARAMETER_REVISION_DATE, this.confluencePackage .getDate(attachmentProperties, ConfluenceXMLPackage.KEY_ATTACHMENT_REVISION_DATE)); } catch (Exception e) { if (this.properties.isVerbose()) { this.logger.error("Failed to parse date", e); } } } if (attachmentProperties.containsKey(ConfluenceXMLPackage.KEY_ATTACHMENT_REVISION_COMMENT)) { attachmentParameters.put(WikiAttachmentFilter.PARAMETER_REVISION_COMMENT, attachmentProperties.getString(ConfluenceXMLPackage.KEY_ATTACHMENT_REVISION_COMMENT)); } // WikiAttachment try (FileInputStream fis = new FileInputStream(contentFile)) { proxyFilter.onWikiAttachment(attachmentName, fis, attachmentSize != -1 ? attachmentSize : contentFile.length(), attachmentParameters); } catch (Exception e) { throw new FilterException("Failed to read attachment", e); } } private void readPageTags(ConfluenceProperties pageProperties, ConfluenceFilter proxyFilter, Map pageTags) throws FilterException { FilterEventParameters pageTagsParameters = new FilterEventParameters(); String objectName = getObjectName(pageProperties); // Tag object pageTagsParameters.put(WikiObjectFilter.PARAMETER_NUMBER, 0); pageTagsParameters.put(WikiObjectFilter.PARAMETER_CLASS_REFERENCE, "XWiki.TagClass"); proxyFilter.beginWikiObject(objectName, pageTagsParameters); // get page tags separated by | as string StringBuilder tagBuilder = new StringBuilder(); String prefix = ""; for (String tag : pageTags.keySet()) { tagBuilder.append(prefix); tagBuilder.append(tag); prefix = "|"; } // object property proxyFilter.onWikiObjectProperty("tags", tagBuilder.toString(), new FilterEventParameters()); proxyFilter.endWikiObject(objectName, pageTagsParameters); } private void readPageComment(ConfluenceProperties pageProperties, ConfluenceFilter proxyFilter, Long commentId, Map pageComments, Map commentIndeces) throws FilterException { String objectName = getObjectName(pageProperties); FilterEventParameters commentParameters = new FilterEventParameters(); // Comment object commentParameters.put(WikiObjectFilter.PARAMETER_NUMBER, commentIndeces.get(commentId)); commentParameters.put(WikiObjectFilter.PARAMETER_CLASS_REFERENCE, "XWiki.XWikiComments"); proxyFilter.beginWikiObject(objectName, commentParameters); // object properties ConfluenceProperties commentProperties = pageComments.get(commentId); // creator String commentCreator; if (commentProperties.containsKey("creatorName")) { // old creator reference by name commentCreator = commentProperties.getString("creatorName"); } else { // new creator reference by key commentCreator = commentProperties.getString("creator"); commentCreator = resolveUserName(commentCreator, commentCreator); } String commentCreatorReference = toUserReference(commentCreator); // content String commentBodyContent = this.confluencePackage.getCommentText(commentId); int commentBodyType = this.confluencePackage.getCommentBodyType(commentId); String commentText = commentBodyContent; if (commentBodyContent != null && this.properties.isConvertToXWiki()) { try { commentText = convertToXWiki21(commentBodyContent, commentBodyType); } catch (Exception e) { this.logger.error("Failed to convert content of the comment with id [{}]", commentId, e); } } // creation date Date commentDate = null; try { commentDate = this.confluencePackage.getDate(commentProperties, "creationDate"); } catch (java.text.ParseException e) { if (this.properties.isVerbose()) { this.logger.error("Failed to parse date", e); } } // parent (replyto) Integer parentIndex = null; if 
(commentProperties.containsKey("parent")) { Long parentId = commentProperties.getLong("parent"); parentIndex = commentIndeces.get(parentId); } proxyFilter.onWikiObjectProperty("author", commentCreatorReference, new FilterEventParameters()); proxyFilter.onWikiObjectProperty("comment", commentText, new FilterEventParameters()); proxyFilter.onWikiObjectProperty("date", commentDate, new FilterEventParameters()); proxyFilter.onWikiObjectProperty("replyto", parentIndex, new FilterEventParameters()); proxyFilter.endWikiObject(objectName, commentParameters); } private String getObjectName(ConfluenceProperties pageProperties) { // get parent name from reference String parentName = ""; try { EntityReference parentReference = getReferenceFromId(pageProperties, ConfluenceXMLPackage.KEY_PAGE_PARENT); if (parentReference != null) { parentName = parentReference.getName(); } } catch (Exception e) { if (this.properties.isVerbose()) { this.logger.warn("Failed to parse parent", e); } } // use space name if there is no parent if (parentName.isEmpty()) { try { parentName = this.confluencePackage .getSpaceName(Long.valueOf(pageProperties.getString(ConfluenceXMLPackage.KEY_PAGE_SPACE))); } catch (NumberFormatException | ConfigurationException e) { if (this.properties.isVerbose()) { this.logger.warn("Failed to parse space", e); } } } // get page name String pageName = pageProperties.getString(ConfluenceXMLPackage.KEY_PAGE_TITLE); // create full page name from parent + title + WebHome StringBuilder nameBuilder = new StringBuilder(); if (!parentName.isEmpty()) { nameBuilder.append(parentName); nameBuilder.append("."); } if (!pageName.isEmpty()) { nameBuilder.append(pageName); nameBuilder.append("."); } nameBuilder.append("WebHome"); return nameBuilder.toString(); } private ConfluenceProperties getContentProperties(ConfluenceProperties properties, String key) throws FilterException { try { return this.confluencePackage.getContentProperties(properties, key); } catch (Exception e) { throw new FilterException("Failed to parse content properties", e); } } }
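/*
 * Illustrative sketch only (hypothetical space, page and user names): a page restriction script written by
 * readInternal() to <tmp>/pagerestriction-script-<SPACEKEY>.vm roughly looks like this:
 *
 * {{velocity}}
 * #set ($mydoc = $xwiki.getDocument("MYSPACE.SomePage.WebHome"))
 * #set ($rightsObject = $mydoc.newObject("XWiki.XWikiRights"))
 * #set ($result = $rightsObject.set("users", "XWiki.jdoe"))
 * #set ($result = $rightsObject.set("levels", "view,edit,comment,delete"))
 * #set ($result = $rightsObject.set("allow", 1))
 * $mydoc.save()
 * {{/velocity}}
 */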