/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
19 package org.apache.syncope.core.persistence.jpa.content;
20
21 import java.io.IOException;
22 import java.io.InputStream;
23 import java.io.OutputStream;
24 import java.lang.reflect.Field;
25 import java.nio.charset.StandardCharsets;
26 import java.sql.Blob;
27 import java.sql.Connection;
28 import java.sql.DatabaseMetaData;
29 import java.sql.ResultSet;
30 import java.sql.ResultSetMetaData;
31 import java.sql.SQLException;
32 import java.sql.Timestamp;
33 import java.sql.Types;
34 import java.time.Instant;
35 import java.time.OffsetDateTime;
36 import java.time.ZoneId;
37 import java.util.ArrayList;
38 import java.util.Collections;
39 import java.util.HashMap;
40 import java.util.HashSet;
41 import java.util.List;
42 import java.util.Map;
43 import java.util.Objects;
44 import java.util.Optional;
45 import java.util.Set;
46 import java.util.StringJoiner;
47 import java.util.TreeMap;
48 import java.util.TreeSet;
49 import java.util.function.Supplier;
50 import java.util.stream.Stream;
51 import javax.persistence.CollectionTable;
52 import javax.persistence.Column;
53 import javax.persistence.EntityManagerFactory;
54 import javax.persistence.JoinTable;
55 import javax.persistence.Table;
56 import javax.persistence.metamodel.Attribute;
57 import javax.persistence.metamodel.EntityType;
58 import javax.persistence.metamodel.PluralAttribute;
59 import javax.sql.DataSource;
60 import javax.xml.XMLConstants;
61 import javax.xml.bind.DatatypeConverter;
62 import javax.xml.transform.OutputKeys;
63 import javax.xml.transform.Transformer;
64 import javax.xml.transform.TransformerConfigurationException;
65 import javax.xml.transform.sax.SAXTransformerFactory;
66 import javax.xml.transform.sax.TransformerHandler;
67 import javax.xml.transform.stream.StreamResult;
68 import org.apache.commons.lang3.StringUtils;
69 import org.apache.commons.lang3.tuple.Pair;
70 import org.apache.cxf.helpers.IOUtils;
71 import org.apache.openjpa.lib.util.collections.BidiMap;
72 import org.apache.openjpa.lib.util.collections.DualHashBidiMap;
73 import org.apache.syncope.common.lib.SyncopeConstants;
74 import org.apache.syncope.core.persistence.api.DomainHolder;
75 import org.apache.syncope.core.persistence.api.content.ContentExporter;
76 import org.apache.syncope.core.persistence.api.dao.AuditConfDAO;
77 import org.apache.syncope.core.persistence.api.dao.RealmDAO;
78 import org.apache.syncope.core.persistence.jpa.entity.JPARealm;
79 import org.apache.syncope.core.provisioning.api.utils.FormatUtils;
80 import org.apache.syncope.core.spring.ApplicationContextProvider;
81 import org.slf4j.Logger;
82 import org.slf4j.LoggerFactory;
83 import org.springframework.jdbc.core.JdbcTemplate;
84 import org.springframework.jdbc.datasource.DataSourceUtils;
85 import org.springframework.jdbc.support.JdbcUtils;
86 import org.springframework.jdbc.support.MetaDataAccessException;
87 import org.springframework.orm.jpa.EntityManagerFactoryUtils;
88 import org.xml.sax.SAXException;
89 import org.xml.sax.helpers.AttributesImpl;
90
91
92
93
94 public class XMLContentExporter implements ContentExporter {
95
96 protected static final Logger LOG = LoggerFactory.getLogger(XMLContentExporter.class);
97
98 protected static final Set<String> TABLE_PREFIXES_TO_BE_EXCLUDED = Set.of(
99 "QRTZ_", AuditConfDAO.AUDIT_ENTRY_TABLE);
100
101 protected static boolean isTableAllowed(final String tableName) {
102 return TABLE_PREFIXES_TO_BE_EXCLUDED.stream().
103 allMatch(prefix -> !tableName.toUpperCase().startsWith(prefix.toUpperCase()));
104 }
105
106 protected static String getValues(final ResultSet rs, final String columnName, final Integer columnType)
107 throws SQLException {
108
109 String value = null;
110
111 try {
112 switch (columnType) {
113 case Types.BINARY:
114 case Types.VARBINARY:
115 case Types.LONGVARBINARY:
116 InputStream is = rs.getBinaryStream(columnName);
117 if (is != null) {
118 value = DatatypeConverter.printHexBinary(IOUtils.toString(is).getBytes());
119 }
120 break;
121
122 case Types.BLOB:
123 Blob blob = rs.getBlob(columnName);
124 if (blob != null) {
125 value = DatatypeConverter.printHexBinary(IOUtils.toString(blob.getBinaryStream()).getBytes());
126 }
127 break;
128
129 case Types.BIT:
130 case Types.BOOLEAN:
131 value = rs.getBoolean(columnName) ? "1" : "0";
132 break;
133
134 case Types.DATE:
135 case Types.TIME:
136 case Types.TIMESTAMP:
137 Timestamp timestamp = rs.getTimestamp(columnName);
138 if (timestamp != null) {
139 value = FormatUtils.format(OffsetDateTime.ofInstant(
140 Instant.ofEpochMilli(timestamp.getTime()), ZoneId.systemDefault()));
141 }
142 break;
143
144 default:
145 value = rs.getString(columnName);
146 }
147 } catch (IOException e) {
148 LOG.error("Error fetching value from {}", columnName, e);
149 }
150
151 return value;
152 }
153
154 protected static String columnName(final Supplier<Stream<Attribute<?, ?>>> attrs, final String columnName) {
155 String name = attrs.get().map(attr -> {
156 if (attr.getName().equalsIgnoreCase(columnName)) {
157 return attr.getName();
158 }
159
160 Field field = (Field) attr.getJavaMember();
161 Column column = field.getAnnotation(Column.class);
162 if (column != null && column.name().equalsIgnoreCase(columnName)) {
163 return column.name();
164 }
165
166 return null;
167 }).filter(Objects::nonNull).findFirst().orElse(columnName);
168
169 if (StringUtils.endsWithIgnoreCase(name, "_ID")) {
170 String left = StringUtils.substringBefore(name, "_");
171 String prefix = attrs.get().filter(attr -> left.equalsIgnoreCase(attr.getName())).findFirst().
172 map(Attribute::getName).orElse(left);
173 name = prefix + "_id";
174 }
175
176 return name;
177 }
178
179 protected static Map<String, Pair<String, String>> relationTables(final BidiMap<String, EntityType<?>> entities) {
180 Map<String, Pair<String, String>> relationTables = new HashMap<>();
181
182 entities.values().stream().forEach(e -> e.getAttributes().stream().
183 filter(a -> a.getPersistentAttributeType() != Attribute.PersistentAttributeType.BASIC).
184 forEach(a -> {
185 Field field = (Field) a.getJavaMember();
186
187 String attrName = Optional.ofNullable(field.getAnnotation(Column.class)).
188 map(Column::name).
189 orElse(a.getName());
190
191 Optional.ofNullable(field.getAnnotation(CollectionTable.class)).
192 ifPresent(collectionTable -> relationTables.put(
193 collectionTable.name(),
194 Pair.of(attrName, collectionTable.joinColumns()[0].name())));
195
196 Optional.ofNullable(field.getAnnotation(JoinTable.class)).ifPresent(joinTable -> {
197 String tableName = joinTable.name();
198 if (StringUtils.isBlank(tableName)) {
199 tableName = entities.getKey(e) + "_"
200 + entities.getKey(((PluralAttribute) a).getElementType());
201 }
202
203 relationTables.put(
204 tableName,
205 Pair.of(joinTable.joinColumns()[0].name(),
206 joinTable.inverseJoinColumns()[0].name()));
207 });
208 }));
209
210 return relationTables;
211 }
212
213 protected static List<String> sortByForeignKeys(
214 final Connection conn, final String schema, final Set<String> tableNames)
215 throws SQLException {
216
217 Set<MultiParentNode<String>> roots = new HashSet<>();
218
219 DatabaseMetaData meta = conn.getMetaData();
220
221 Map<String, MultiParentNode<String>> exploited = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
222 Set<String> pkTableNames = new HashSet<>();
223
224 for (String tableName : tableNames) {
225 MultiParentNode<String> node = Optional.ofNullable(exploited.get(tableName)).orElseGet(() -> {
226 MultiParentNode<String> n = new MultiParentNode<>(tableName);
227 roots.add(n);
228 exploited.put(tableName, n);
229 return n;
230 });
231
232 pkTableNames.clear();
233 try (ResultSet rs = meta.getImportedKeys(conn.getCatalog(), schema, tableName)) {
234
235 while (rs.next()) {
236 pkTableNames.add(rs.getString("PKTABLE_NAME"));
237 }
238 }
239
240 pkTableNames.stream().
241 filter(pkTableName -> !tableName.equalsIgnoreCase(pkTableName)).
242 forEach(pkTableName -> {
243
244 MultiParentNode<String> pkNode = Optional.ofNullable(exploited.get(pkTableName)).
245 orElseGet(() -> {
246 MultiParentNode<String> n = new MultiParentNode<>(pkTableName);
247 roots.add(n);
248 exploited.put(pkTableName, n);
249 return n;
250 });
251
252 pkNode.addChild(node);
253
254 if (roots.contains(node)) {
255 roots.remove(node);
256 }
257 });
258 }
259
260 List<String> sortedTableNames = new ArrayList<>(tableNames.size());
261 MultiParentNodeOp.traverseTree(roots, sortedTableNames);
262
263
264
265 sortedTableNames.retainAll(tableNames);
266
267 LOG.debug("Tables after retainAll {}", sortedTableNames);
268
269 Collections.reverse(sortedTableNames);
270
271 return sortedTableNames;
272 }
273
274 protected final DomainHolder domainHolder;
275
276 protected final RealmDAO realmDAO;
277
278 public XMLContentExporter(final DomainHolder domainHolder, final RealmDAO realmDAO) {
279 this.domainHolder = domainHolder;
280 this.realmDAO = realmDAO;
281 }
282
283 @SuppressWarnings("unchecked")
284 protected void exportTable(
285 final DataSource dataSource,
286 final String tableName,
287 final int threshold,
288 final BidiMap<String, EntityType<?>> entities,
289 final Map<String, Pair<String, String>> relationTables,
290 final TransformerHandler handler) throws SQLException, MetaDataAccessException, SAXException {
291
292 LOG.debug("Export table {}", tableName);
293
294 String orderBy = JdbcUtils.extractDatabaseMetaData(dataSource, meta -> {
295 StringJoiner ob = new StringJoiner(",");
296
297
298 try (ResultSet pkeyRS = meta.getPrimaryKeys(null, null, tableName)) {
299 while (pkeyRS.next()) {
300 Optional.ofNullable(pkeyRS.getString("COLUMN_NAME")).ifPresent(ob::add);
301 }
302 }
303
304 return ob.toString();
305 });
306
307
308 StringBuilder query = new StringBuilder();
309 query.append("SELECT * FROM ").append(tableName).append(" a");
310 if (StringUtils.isNotBlank(orderBy)) {
311 query.append(" ORDER BY ").append(orderBy);
312 }
313
314 JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
315 jdbcTemplate.setMaxRows(threshold);
316
317 Optional<EntityType<?>> entity = entities.entrySet().stream().
318 filter(entry -> entry.getKey().equalsIgnoreCase(tableName)).
319 findFirst().
320 map(Map.Entry::getValue);
321
322 String outputTableName = entity.map(entities::getKey).
323 orElseGet(() -> relationTables.keySet().stream().
324 filter(tableName::equalsIgnoreCase).findFirst().
325 orElse(tableName));
326
327 List<Map<String, String>> rows = new ArrayList<>();
328
329 jdbcTemplate.query(query.toString(), rs -> {
330 Map<String, String> row = new HashMap<>();
331 rows.add(row);
332
333 ResultSetMetaData rsMeta = rs.getMetaData();
334 for (int i = 0; i < rsMeta.getColumnCount(); i++) {
335 String columnName = rsMeta.getColumnName(i + 1);
336 Integer columnType = rsMeta.getColumnType(i + 1);
337
338
339 Optional.ofNullable(getValues(rs, columnName, columnType)).ifPresent(value -> {
340 String name = entity.map(e -> columnName(
341 () -> (Stream<Attribute<?, ?>>) e.getAttributes().stream(), columnName)).
342 orElse(columnName);
343
344 if (relationTables.containsKey(outputTableName)) {
345 Pair<String, String> relationColumns = relationTables.get(outputTableName);
346 if (name.equalsIgnoreCase(relationColumns.getLeft())) {
347 name = relationColumns.getLeft();
348 } else if (name.equalsIgnoreCase(relationColumns.getRight())) {
349 name = relationColumns.getRight();
350 }
351 }
352
353 row.put(name, value);
354 LOG.debug("Add for table {}: {}=\"{}\"", outputTableName, name, value);
355 });
356 }
357 });
358
359 if (tableName.equalsIgnoreCase(JPARealm.TABLE)) {
360 List<Map<String, String>> realmRows = new ArrayList<>(rows);
361 rows.clear();
362 realmDAO.findDescendants(SyncopeConstants.ROOT_REALM, null, -1, -1).
363 forEach(realm -> realmRows.stream().filter(row -> {
364
365 String id = Optional.ofNullable(row.get("ID")).orElseGet(() -> row.get("id"));
366 return realm.getKey().equals(id);
367 }).findFirst().ifPresent(rows::add));
368 }
369
370 for (Map<String, String> row : rows) {
371 AttributesImpl attrs = new AttributesImpl();
372 row.forEach((key, value) -> attrs.addAttribute("", "", key, "CDATA", value));
373
374 handler.startElement("", "", outputTableName, attrs);
375 handler.endElement("", "", outputTableName);
376 }
377 }
378
379 @Override
380 public void export(
381 final String domain,
382 final int tableThreshold,
383 final OutputStream os)
384 throws SAXException, TransformerConfigurationException {
385
386 StreamResult streamResult = new StreamResult(os);
387 SAXTransformerFactory transformerFactory = (SAXTransformerFactory) SAXTransformerFactory.newInstance();
388 transformerFactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
389
390 TransformerHandler handler = transformerFactory.newTransformerHandler();
391 Transformer serializer = handler.getTransformer();
392 serializer.setOutputProperty(OutputKeys.ENCODING, StandardCharsets.UTF_8.name());
393 serializer.setOutputProperty(OutputKeys.INDENT, "yes");
394 handler.setResult(streamResult);
395 handler.startDocument();
396 handler.startElement("", "", ROOT_ELEMENT, new AttributesImpl());
397
398 DataSource dataSource = Optional.ofNullable(domainHolder.getDomains().get(domain)).
399 orElseThrow(() -> new IllegalArgumentException("Could not find DataSource for domain " + domain));
400
401 String schema = null;
402 if (ApplicationContextProvider.getBeanFactory().containsBean(domain + "DatabaseSchema")) {
403 Object schemaBean = ApplicationContextProvider.getBeanFactory().getBean(domain + "DatabaseSchema");
404 if (schemaBean instanceof String) {
405 schema = (String) schemaBean;
406 }
407 }
408
409 Connection conn = DataSourceUtils.getConnection(dataSource);
410 try (ResultSet rs = conn.getMetaData().
411 getTables(null, StringUtils.isBlank(schema) ? null : schema, null, new String[] { "TABLE" })) {
412
413 Set<String> tableNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
414
415 while (rs.next()) {
416 String tableName = rs.getString("TABLE_NAME");
417 LOG.debug("Found table {}", tableName);
418 if (isTableAllowed(tableName)) {
419 tableNames.add(tableName);
420 }
421 }
422
423 LOG.debug("Tables to be exported {}", tableNames);
424
425 EntityManagerFactory emf = EntityManagerFactoryUtils.findEntityManagerFactory(
426 ApplicationContextProvider.getBeanFactory(), domain);
427 Set<EntityType<?>> entityTypes = emf == null ? Set.of() : emf.getMetamodel().getEntities();
428 BidiMap<String, EntityType<?>> entities = new DualHashBidiMap<>();
429 entityTypes.forEach(entity -> Optional.ofNullable(
430 entity.getBindableJavaType().getAnnotation(Table.class)).
431 ifPresent(table -> entities.put(table.name(), entity)));
432
433
434 for (String tableName : sortByForeignKeys(conn, schema, tableNames)) {
435 try {
436 exportTable(dataSource, tableName, tableThreshold, entities, relationTables(entities), handler);
437 } catch (Exception e) {
438 LOG.error("Failure exporting table {}", tableName, e);
439 }
440 }
441 } catch (SQLException e) {
442 LOG.error("While exporting database content", e);
443 } finally {
444 DataSourceUtils.releaseConnection(conn, dataSource);
445 }
446
447 handler.endElement("", "", ROOT_ELEMENT);
448 handler.endDocument();
449 }
450 }