/*
 * Dump package/portfolio properties. The properties are written to stdout
 * record by record. In addition they are written to a CSV file that can be
 * imported into spreadsheets.
 *
 * Required software: pCOS interface 8 (PDFlib+PDI/PPS 9, TET 4.1, PLOP 5.0)
 * Required data: PDF document that is a package or a portfolio and that has
 * package properties
 */
package com.pdflib.cookbook.pcos.interactive;

import java.io.File;
import java.io.FileOutputStream;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import com.pdflib.IpCOS;
import com.pdflib.cookbook.pcos.pcos_cookbook_example;

public class package_properties extends pcos_cookbook_example {
    /**
     * This is where the data files are. Adjust as necessary.
     */
    private final static String SEARCH_PATH = "../input";

    /**
     * The field separator character used in the CSV file output.
     */
    private final static char CSV_SEPARATOR_CHAR = ',';

    public void example_code(IpCOS p, int doc) throws Exception {
        String filename = p.pcos_get_string(doc, "filename");

        System.out.println("File name: " + filename);
        System.out.println();

        /* Check whether the document represents a PDF package */
        String colltype = p.pcos_get_string(doc, "type:/Root/Collection");
        if (colltype.equals("dict")) {
            String defaultdoc;
            boolean has_folders = p.pcos_get_string(doc,
                "type:/Root/Collection/Folders").equals("dict");
            String doctype = has_folders ? "portfolio" : "package";

            System.out.print("PDF " + doctype + ", default document: ");

            /*
             * Check the default document (may be different from the container
             * PDF); we currently don't check whether this name is actually
             * present in the list of embedded files.
             */
            if (p.pcos_get_string(doc, "type:/Root/Collection/D").equals(
                    "string")) {
                defaultdoc = "'" + p.pcos_get_string(doc, "/Root/Collection/D")
                    + "'";
            }
            else {
                defaultdoc = "container PDF";
            }
            System.out.println(defaultdoc);

            File input_file = new File(filename);
            String basename = input_file.getName();
            String csv_name = basename + ".csv";

            System.out.println("Writing properties also to CSV file \""
                + csv_name + "\"");

            PrintWriter csv_file = new PrintWriter(new FileOutputStream(
                csv_name));

            print_package_properties(p, doc, has_folders, csv_file);

            csv_file.close();
        }
        else {
            System.out.println("Input document is not a PDF package");
        }

        System.out.println();
    }

    /**
     * Description of a CollectionField object (see "TABLE 8.8 Entries in a
     * collection field dictionary" in the Adobe PDF Reference 1.7).
     */
    private class collection_field {
        /**
         * A text field.
         */
        public final static int TYPE_S = 0;

        /**
         * A date field.
         */
        public final static int TYPE_D = 1;

        /**
         * A number field.
         */
        public final static int TYPE_N = 2;

        /**
         * The field data is the file name of the embedded file stream.
         */
        public final static int TYPE_F = 3;

        /**
         * The field data is the description of the embedded file stream.
         */
        public final static int TYPE_DESC = 4;

        /**
         * The field data is the modification date of the embedded file stream.
         */
        public final static int TYPE_MODDATE = 5;

        /**
         * The field data is the creation date of the embedded file stream.
         */
        public final static int TYPE_CREATIONDATE = 6;

        /**
         * The field data is the size of the embedded file stream.
         */
        public final static int TYPE_SIZE = 7;

        /**
         * The "Subtype" entry of the dictionary, one of the above "TYPE_..."
         * values.
         */
        public int subtype;
        /**
         * The textual field name that is displayed to the user.
         */
        public String display_name;

        /**
         * The relative order of the field name in the user interface.
         */
        public int order;

        /**
         * The initial visibility of the field in the user interface.
         */
        public boolean visibility;

        /**
         * Whether the field should be editable in the user interface.
         */
        public boolean editable;

        /**
         * Constructor with default values.
         */
        public collection_field() {
            subtype = -1;
            display_name = "";
            order = -1;
            visibility = true;
            editable = false;
        }
    }

    /**
     * An array to map the above integer-encoded field types to human-readable
     * strings.
     */
    public static final String type_names[] = {
        "text", "date", "number", "file name", "description",
        "modification date", "creation date", "size"
    };

    /**
     * Description of a schema dictionary. The class has fixed
     * collection_field members for the predefined file-related fields, and a
     * Map that stores the custom fields under their name as the key.
     */
    private class schema_dictionary {
        /**
         * The number of predefined fields.
         */
        public static final int NUMBER_OF_PREDEFINED_FIELDS = 5;

        public collection_field predef_f;
        public collection_field predef_desc;
        public collection_field predef_moddate;
        public collection_field predef_creationdate;
        public collection_field predef_size;

        public Map<String, collection_field> custom_fields =
            new HashMap<String, collection_field>();

        /**
         * Default constructor that sets the predefined fields to defaults. If
         * a schema dictionary is present, it will override the built-in
         * defaults.
         */
        public schema_dictionary() {
            predef_f = new collection_field();
            predef_f.display_name = "Name";
            predef_f.editable = true;
            predef_f.subtype = collection_field.TYPE_F;

            predef_desc = new collection_field();
            predef_desc.display_name = "Description";
            predef_desc.editable = true;
            predef_desc.subtype = collection_field.TYPE_DESC;

            predef_moddate = new collection_field();
            predef_moddate.display_name = "Modified Date";
            predef_moddate.subtype = collection_field.TYPE_MODDATE;

            predef_creationdate = new collection_field();
            predef_creationdate.display_name = "Creation Date";
            predef_creationdate.subtype = collection_field.TYPE_CREATIONDATE;

            predef_size = new collection_field();
            predef_size.display_name = "Size";
            predef_size.subtype = collection_field.TYPE_SIZE;
        }
    }

    /**
     * Contains the information of a parsed collection schema dictionary.
     */
    private schema_dictionary schema = new schema_dictionary();

    /**
     * Map of folder IDs to folder names (see "TABLE 8.6c Entries in a folder
     * dictionary" in the "Adobe Supplement to the ISO 32000 BaseVersion: 1.7
     * ExtensionLevel: 3" document).
     *
     * Every folder in a portfolio has a unique ID. To be able to display a
     * folder name for an item in a portfolio, the map is built once by
     * analyzing the folder structure under "/Root/Collection/Folders".
     */
    private Map<Integer, String> folders = null;

    /**
     * Analyze the collection schema dictionary if present, and print the
     * properties of all package members.
     *
     * @param p
     *            The {@link IpCOS} object
     * @param doc
     *            A valid document handle
     * @param has_folders
     *            Whether the document contains a portfolio folder structure
     * @param csv_file
     *            The {@link PrintWriter} object for producing the CSV file
     *
     * @throws Exception
     */
    private void print_package_properties(IpCOS p, int doc,
            boolean has_folders, PrintWriter csv_file) throws Exception {
        analyze_schema(p, doc);

        if (has_folders) {
            analyze_folders(p, doc);
        }

        print_schema_properties();
        print_schema_properties(csv_file);

        print_properties(p, doc);
        print_properties(csv_file, p, doc);
    }
    /**
     * Parse the folder structure under the pCOS path
     * "/Root/Collection/Folders" and save the mapping of folder IDs to folder
     * names in the {@link #folders folders} member.
     *
     * @param p
     *            The {@link IpCOS} object
     * @param doc
     *            A valid document handle
     *
     * @throws Exception
     */
    private void analyze_folders(IpCOS p, int doc) throws Exception {
        folders = new HashMap<Integer, String>();

        /*
         * In order to avoid very long pCOS pathnames, we operate with pCOS
         * object IDs here.
         */
        int root_folder_id = (int) p.pcos_get_number(doc,
            "pcosid:/Root/Collection/Folders");

        walk_folder_tree(p, doc, root_folder_id, "");
    }

    /**
     * Recursive method to walk the folder tree.
     *
     * @param p
     *            The {@link IpCOS} object
     * @param doc
     *            A valid document handle
     * @param node_id
     *            The pCOS object ID of the current folder node
     * @param parent_folder_path
     *            The accumulated pathname for the parent folder
     *
     * @throws Exception
     */
    private void walk_folder_tree(IpCOS p, int doc, int node_id,
            String parent_folder_path) throws Exception {
        boolean has_next = true;

        while (has_next) {
            /*
             * Actually every folder dictionary should have a /Name entry, but
             * we've seen documents where it is missing. For the root folder
             * it is usually empty.
             */
            String node_path = "objects[" + node_id + "]";
            String node_name_path = node_path + "/Name";
            String node_name =
                p.pcos_get_string(doc, "type:" + node_name_path)
                    .equals("string")
                    ? p.pcos_get_string(doc, node_name_path) : "";

            String node_id_path = node_path + "/ID";
            int folder_id = (int) p.pcos_get_number(doc, node_id_path);

            String separator = parent_folder_path.endsWith("/") ? "" : "/";
            String current_path = parent_folder_path + separator + node_name;
            folders.put(Integer.valueOf(folder_id), current_path);

            System.out.println("folder id " + folder_id + " path "
                + current_path);

            /* Descend into the child folder list if present. */
            String child_path = node_path + "/Child";
            if (p.pcos_get_string(doc, "type:" + child_path).equals("dict")) {
                int child_node_id = (int) p.pcos_get_number(doc,
                    "pcosid:" + child_path);
                walk_folder_tree(p, doc, child_node_id, current_path);
            }

            /* Continue with the next sibling folder if present. */
            String next_path = node_path + "/Next";
            has_next = p.pcos_get_string(doc, "type:" + next_path).equals(
                "dict");
            if (has_next) {
                node_id = (int) p.pcos_get_number(doc, "pcosid:" + next_path);
            }
        }
    }
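    /*
     * For orientation only: a hedged sketch (not taken from a real document)
     * of the folder dictionaries that walk_folder_tree() traverses. Each
     * folder carries an /ID and usually a /Name; /Child points to the first
     * subfolder and /Next to the following sibling, which is exactly how the
     * method above descends and iterates:
     *
     *   5 0 obj                       % root folder
     *     << /Type /Folder /ID 0 /Name () /Child 6 0 R >>
     *   6 0 obj                       % subfolder "reports"
     *     << /Type /Folder /ID 1 /Name (reports) /Next 7 0 R >>
     *   7 0 obj                       % subfolder "images"
     *     << /Type /Folder /ID 2 /Name (images) >>
     *
     * The object numbers and folder names here are invented for illustration.
     */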
    /**
     * Read in a collection field dictionary, determine whether it describes
     * one of the predefined properties or a custom property, and save it in
     * the {@link #schema} member.
     *
     * @param p
     *            The IpCOS object
     * @param doc
     *            A valid document handle
     * @param key
     *            The name of the entry in the collection schema dictionary
     * @param value_path
     *            The pCOS path for the collection field dictionary
     *
     * @throws Exception
     */
    private void read_collection_field(IpCOS p, int doc, String key,
            String value_path) throws Exception {
        collection_field field = new collection_field();

        // The display name is mandatory.
        field.display_name = p.pcos_get_string(doc, value_path + "/N");

        // The relative order is optional.
        String order_path = value_path + "/O";
        String objtype = p.pcos_get_string(doc, "type:" + order_path);
        if (objtype.equals("integer")) {
            field.order = (int) p.pcos_get_number(doc, order_path);
        }

        // The visibility is optional.
        String visibility_path = value_path + "/V";
        objtype = p.pcos_get_string(doc, "type:" + visibility_path);
        if (objtype.equals("boolean")) {
            field.visibility =
                (int) p.pcos_get_number(doc, visibility_path) != 0;
        }

        // The editability is optional.
        String editable_path = value_path + "/E";
        objtype = p.pcos_get_string(doc, "type:" + editable_path);
        if (objtype.equals("boolean")) {
            field.editable = (int) p.pcos_get_number(doc, editable_path) != 0;
        }

        // The subtype is required.
        String subtype = p.pcos_get_string(doc, value_path + "/Subtype");
        if (subtype.equals("S")) {
            field.subtype = collection_field.TYPE_S;
            schema.custom_fields.put(key, field);
        }
        else if (subtype.equals("D")) {
            field.subtype = collection_field.TYPE_D;
            schema.custom_fields.put(key, field);
        }
        else if (subtype.equals("N")) {
            field.subtype = collection_field.TYPE_N;
            schema.custom_fields.put(key, field);
        }
        else if (subtype.equals("F")) {
            field.subtype = collection_field.TYPE_F;
            schema.predef_f = field;
        }
        else if (subtype.equals("Desc")) {
            field.subtype = collection_field.TYPE_DESC;
            schema.predef_desc = field;
        }
        else if (subtype.equals("ModDate")) {
            field.subtype = collection_field.TYPE_MODDATE;
            schema.predef_moddate = field;
        }
        else if (subtype.equals("CreationDate")) {
            field.subtype = collection_field.TYPE_CREATIONDATE;
            schema.predef_creationdate = field;
        }
        else if (subtype.equals("Size")) {
            field.subtype = collection_field.TYPE_SIZE;
            schema.predef_size = field;
        }
    }

    /**
     * Read in the collection schema dictionary.
     *
     * @param p
     *            The IpCOS object
     * @param doc
     *            A valid document handle
     *
     * @throws Exception
     */
    private void analyze_schema(IpCOS p, int doc) throws Exception {
        String schema_path = "/Root/Collection/Schema";
        String objtype = p.pcos_get_string(doc, "type:" + schema_path);

        if (objtype.equals("dict")) {
            int length = (int) p.pcos_get_number(doc, "length:" + schema_path);

            for (int i = 0; i < length; i += 1) {
                String schema_entry_path = schema_path + "[" + i + "]";
                String key = p.pcos_get_string(doc,
                    schema_entry_path + ".key");
                String value_path = schema_entry_path + ".val";

                objtype = p.pcos_get_string(doc, "type:" + value_path);
                if (objtype.equals("dict")) {
                    read_collection_field(p, doc, key, value_path);
                }
                else if (key.equals("Type") && objtype.equals("name")) {
                    String dictionaryType = p.pcos_get_string(doc, value_path);
                    if (!dictionaryType.equals("CollectionSchema")) {
                        System.out.println("Illegal type \"" + dictionaryType
                            + "\" for collection schema dictionary");
                    }
                }
                else {
                    System.out.println(
                        "Illegal entry in collection schema dictionary "
                        + "(key: " + key + ", objtype: " + objtype + ")");
                }
            }
        }
        else {
            System.out.println("No collection schema dictionary present");

            /*
             * Try to detect Acrobat 9 Portfolios. At the time this pCOS
             * Cookbook example was written, there was no documentation about
             * the Portfolio implementation available, so this is done
             * heuristically.
             */
            String acrobat9_entries[] = {
                "Folder", "Color", "Navigator", "Resources"
            };
            boolean acrobat9_assumed = false;

            for (int i = 0; i < acrobat9_entries.length && !acrobat9_assumed;
                    i += 1) {
                String path = "/Root/Collection/" + acrobat9_entries[i];
                objtype = p.pcos_get_string(doc, "type:" + path);
                acrobat9_assumed = objtype.equals("dict");
            }

            if (acrobat9_assumed) {
                System.out.println("This looks like an Acrobat 9 Portfolio");
            }
        }
    }
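    /*
     * For orientation only: a hedged sketch of the /Root/Collection/Schema
     * dictionary that analyze_schema() and read_collection_field() parse.
     * Each entry maps a key to a collection field dictionary with /Subtype
     * (S, D, N, F, Desc, ModDate, CreationDate or Size), the display name /N,
     * and the optional /O (order), /V (visibility) and /E (editable) entries:
     *
     *   /Collection <<
     *     /Schema <<
     *       /Type /CollectionSchema
     *       /FileName << /Subtype /F /N (Name) /O 1 >>
     *       /Company  << /Subtype /S /N (Company) /O 2 /E true >>
     *       /Modified << /Subtype /ModDate /N (Modified) /O 3 /V false >>
     *     >>
     *   >>
     *
     * The keys "FileName", "Company" and "Modified" are invented examples;
     * real documents use arbitrary keys.
     */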
    /**
     * Print the collection schema. This is a description of all the
     * properties that can be attached to the package members.
     *
     * @throws Exception
     */
    private void print_schema_properties() throws Exception {
        System.out.println();
        System.out.println("Collection schema:");

        int property_number = 1;
        print_schema_property(property_number++, schema.predef_f, null);
        print_schema_property(property_number++, schema.predef_desc, null);
        print_schema_property(property_number++, schema.predef_moddate, null);
        print_schema_property(property_number++, schema.predef_creationdate,
            null);
        print_schema_property(property_number++, schema.predef_size, null);

        Set<Entry<String, collection_field>> customFields =
            schema.custom_fields.entrySet();
        Iterator<Entry<String, collection_field>> i = customFields.iterator();
        while (i.hasNext()) {
            Entry<String, collection_field> entry = i.next();
            String key = entry.getKey();
            collection_field field_desc = entry.getValue();
            print_schema_property(property_number++, field_desc, key);
        }
    }

    /**
     * Print the header section of the CSV file that describes the collection
     * schema.
     *
     * @param csv_file
     *            The {@link PrintWriter} object for producing the CSV file
     */
    private void print_schema_properties(PrintWriter csv_file) {
        /*
         * Add 1 for the first column, which contains the row descriptions.
         */
        int number_of_columns = schema_dictionary.NUMBER_OF_PREDEFINED_FIELDS
            + schema.custom_fields.size() + 1;

        /*
         * Add 1 for the folder name column if this is a portfolio.
         */
        if (folders != null) {
            number_of_columns += 1;
        }

        /*
         * Array with the row descriptions for the schema header section.
         */
        String row_descriptions[] = {
            "Field name", "Kind", "Type", "Order", "Visibility", "Editable"
        };

        /*
         * Allocate a two-dimensional String array to make it simpler to write
         * out the header section with the schema description.
         */
        String schema_header[][] =
            new String[row_descriptions.length][number_of_columns];

        /*
         * Transfer the row descriptions to the first column.
         */
        int column_counter = 0;
        for (int i = 0; i < row_descriptions.length; i += 1) {
            schema_header[i][column_counter] = row_descriptions[i];
        }

        /*
         * Transfer the column for the folder name if this is a portfolio.
         */
        if (folders != null) {
            column_counter += 1;
            int row = 0;
            schema_header[row++][column_counter] = "Folder";
            schema_header[row++][column_counter] = "built-in";
            schema_header[row++][column_counter] = "folder name";
            schema_header[row++][column_counter] = "";
            schema_header[row++][column_counter] = "";
            schema_header[row++][column_counter] = "";
        }

        /*
         * Transfer the predefined fields.
         */
        column_counter += 1;
        csv_schema_property(schema_header, column_counter++, schema.predef_f,
            null);
        csv_schema_property(schema_header, column_counter++,
            schema.predef_desc, null);
        csv_schema_property(schema_header, column_counter++,
            schema.predef_moddate, null);
        csv_schema_property(schema_header, column_counter++,
            schema.predef_creationdate, null);
        csv_schema_property(schema_header, column_counter++,
            schema.predef_size, null);

        /*
         * Transfer the custom fields.
         */
        Set<Entry<String, collection_field>> customFields =
            schema.custom_fields.entrySet();
        Iterator<Entry<String, collection_field>> i = customFields.iterator();
        while (i.hasNext()) {
            Entry<String, collection_field> entry = i.next();
            String key = entry.getKey();
            collection_field field_desc = entry.getValue();
            csv_schema_property(schema_header, column_counter++, field_desc,
                key);
        }

        /*
         * Write out the header section row by row.
         */
        for (int row = 0; row < row_descriptions.length; row += 1) {
            csv_row_description(csv_file, schema_header[row][0]);

            for (int column = 1; column < column_counter; column += 1) {
                csv_column(csv_file, schema_header[row][column]);
            }

            csv_row_complete(csv_file);
        }
    }
    /**
     * Print the description for a single property in the schema.
     *
     * @param i
     *            The relative number of the property
     * @param field
     *            The {@link collection_field} description of the field
     * @param key
     *            The key in the collection item (CI) dictionary for a custom
     *            property, null otherwise
     */
    private void print_schema_property(int i, collection_field field,
            String key) {
        print_formatted("Property number", "" + i);
        print_formatted("Property kind", key == null ? "built-in" : "custom");
        print_formatted("Display name", field.display_name);
        print_formatted("Type", type_names[field.subtype]);
        print_formatted("Order", field.order != -1 ? "" + field.order : "");
        print_formatted("Visibility", field.visibility ? "true" : "false");
        print_formatted("Editable", field.editable ? "true" : "false");
    }

    /**
     * Print the actual properties. The schema must have been analyzed first.
     * As the lengths of the fields are unknown upfront, the properties are
     * printed record by record to avoid producing ugly tables. The Sort
     * dictionary in the Collection dictionary is ignored, so the properties
     * are printed in the order in which the documents appear in the Names
     * array.
     *
     * @param p
     *            The IpCOS object
     * @param doc
     *            A valid document handle
     *
     * @throws Exception
     */
    private void print_properties(IpCOS p, int doc) throws Exception {
        System.out.println();
        System.out.println("Properties of package members:");

        int filecount = (int) p.pcos_get_number(doc,
            "length:names/EmbeddedFiles");

        for (int file_index = 0; file_index < filecount; file_index++) {
            System.out.println("Package member #" + (file_index + 1) + ":");
            print_file_properties(p, doc,
                "names/EmbeddedFiles[" + file_index + "]");
        }
    }

    /**
     * Print the actual properties to the CSV file.
     *
     * @param csv_file
     *            The CSV file
     * @param p
     *            The {@link IpCOS} object
     * @param doc
     *            A valid document handle
     *
     * @throws Exception
     */
    private void print_properties(PrintWriter csv_file, IpCOS p, int doc)
            throws Exception {
        int filecount = (int) p.pcos_get_number(doc,
            "length:names/EmbeddedFiles");

        for (int file_index = 0; file_index < filecount; file_index++) {
            csv_row_description(csv_file,
                "Package member #" + (file_index + 1));
            print_file_properties(csv_file, p, doc,
                "names/EmbeddedFiles[" + file_index + "]");
            csv_row_complete(csv_file);
        }
    }

    /**
     * Print the properties for a single file.
     *
     * @param p
     *            The IpCOS object
     * @param doc
     *            A valid document handle
     * @param file_path
     *            The pCOS path for the EmbeddedFiles entry
     *
     * @throws Exception
     */
    private void print_file_properties(IpCOS p, int doc, String file_path)
            throws Exception {
        if (folders != null) {
            print_folder_path(p, doc, file_path);
        }

        int property_number = 1;
        print_property(p, doc, property_number++, file_path, schema.predef_f,
            null);
        print_property(p, doc, property_number++, file_path,
            schema.predef_desc, null);
        print_property(p, doc, property_number++, file_path,
            schema.predef_moddate, null);
        print_property(p, doc, property_number++, file_path,
            schema.predef_creationdate, null);
        print_property(p, doc, property_number++, file_path,
            schema.predef_size, null);

        Set<Entry<String, collection_field>> customFields =
            schema.custom_fields.entrySet();
        Iterator<Entry<String, collection_field>> i = customFields.iterator();
        while (i.hasNext()) {
            Entry<String, collection_field> entry = i.next();
            String key = entry.getKey();
            collection_field field_desc = entry.getValue();
            print_property(p, doc, property_number++, file_path, field_desc,
                key);
        }
    }
    /**
     * Print out the folder pathname in the portfolio.
     *
     * @param p
     *            The IpCOS object
     * @param doc
     *            A valid document handle
     * @param file_path
     *            The pCOS path of the EmbeddedFiles entry
     *
     * @throws Exception
     */
    private void print_folder_path(IpCOS p, int doc, String file_path)
            throws Exception {
        String folder = get_folder_path(p, doc, file_path);
        print_formatted("Folder", folder);
    }

    /**
     * Files in the "EmbeddedFiles" name tree are linked via a special naming
     * convention "&lt;id&gt;filename" to their folders. "id" is the folder id
     * that is stored in the folder dictionary.
     *
     * @param p
     *            The IpCOS object
     * @param doc
     *            A valid document handle
     * @param file_path
     *            The pCOS path of the EmbeddedFiles entry
     *
     * @return the folder name
     *
     * @throws Exception
     */
    private String get_folder_path(IpCOS p, int doc, String file_path)
            throws Exception {
        String folder_link = p.pcos_get_string(doc, file_path + ".key");

        /*
         * The first character must be the '<' character if the file is
         * associated with a folder. The '>' must follow after at least one
         * character.
         */
        int lt_index = folder_link.indexOf('<');
        int gt_index = folder_link.indexOf('>');

        /*
         * Extract the folder id. If a name does not conform to the rules,
         * the file is treated as associated with the root folder.
         */
        String folder = "/";
        if (lt_index == 0 && gt_index >= 2) {
            String folder_id_string = folder_link.substring(1, gt_index);
            try {
                Integer folder_id = Integer.valueOf(folder_id_string);
                folder = folders.get(folder_id);
            }
            catch (NumberFormatException e) {
                // Invalid id number in the folder id string, assume root folder
            }
        }

        return folder;
    }
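    /*
     * For orientation only: a hedged, invented example of the naming
     * convention that get_folder_path() decodes. In a portfolio the keys of
     * the "EmbeddedFiles" name tree carry the folder ID in angle brackets in
     * front of the file name:
     *
     *   key "<2>report.pdf"  ->  folder ID 2, e.g. mapped to "/images"
     *   key "report.pdf"     ->  no folder prefix, treated as root folder "/"
     *
     * The IDs and names are made up; the mapping from ID to folder path comes
     * from the folders map built in analyze_folders().
     */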
    /**
     * Print out a single property to the CSV file.
     *
     * @param csv_file
     *            The CSV file
     * @param p
     *            An IpCOS object
     * @param doc
     *            A valid document handle
     * @param file_path
     *            The pCOS path for the package member
     * @param field
     *            The {@link collection_field} description of the field
     * @param key
     *            The key in the "CI" dictionary for the package member
     *
     * @throws Exception
     */
    private void print_property(PrintWriter csv_file, IpCOS p, int doc,
            String file_path, collection_field field, String key)
            throws Exception {
        switch (field.subtype) {
        case collection_field.TYPE_S:
            csv_column(csv_file, get_text_field(p, doc, file_path, key));
            break;

        case collection_field.TYPE_D:
            // we print dates as strings
            csv_column(csv_file, get_text_field(p, doc, file_path, key));
            break;

        case collection_field.TYPE_N:
            csv_column(csv_file, get_number_field(p, doc, file_path, key));
            break;

        case collection_field.TYPE_F:
            csv_column(csv_file, get_filename(p, doc, file_path));
            break;

        case collection_field.TYPE_DESC:
            csv_column(csv_file, get_description(p, doc, file_path));
            break;

        case collection_field.TYPE_MODDATE:
            csv_column(csv_file, get_moddate(p, doc, file_path));
            break;

        case collection_field.TYPE_CREATIONDATE:
            csv_column(csv_file, get_creationdate(p, doc, file_path));
            break;

        case collection_field.TYPE_SIZE:
            csv_column(csv_file, get_size(p, doc, file_path));
            break;
        }
    }

    /**
     * Print the properties for a single file to the CSV file.
     *
     * @param csv_file
     *            The CSV file
     * @param p
     *            The IpCOS object
     * @param doc
     *            A valid document handle
     * @param file_path
     *            The pCOS path for the EmbeddedFiles entry
     *
     * @throws Exception
     */
    private void print_file_properties(PrintWriter csv_file, IpCOS p, int doc,
            String file_path) throws Exception {
        if (folders != null) {
            csv_column(csv_file, get_folder_path(p, doc, file_path));
        }

        print_property(csv_file, p, doc, file_path, schema.predef_f, null);
        print_property(csv_file, p, doc, file_path, schema.predef_desc, null);
        print_property(csv_file, p, doc, file_path, schema.predef_moddate,
            null);
        print_property(csv_file, p, doc, file_path,
            schema.predef_creationdate, null);
        print_property(csv_file, p, doc, file_path, schema.predef_size, null);

        Set<Entry<String, collection_field>> customFields =
            schema.custom_fields.entrySet();
        Iterator<Entry<String, collection_field>> i = customFields.iterator();
        while (i.hasNext()) {
            Entry<String, collection_field> entry = i.next();
            String key = entry.getKey();
            collection_field field_desc = entry.getValue();
            print_property(csv_file, p, doc, file_path, field_desc, key);
        }
    }

    /**
     * Print out a single property.
     *
     * @param p
     *            An IpCOS object
     * @param doc
     *            A valid document handle
     * @param number
     *            The relative number of the property
     * @param file_path
     *            The pCOS path for the package member
     * @param field
     *            The {@link collection_field} description of the field
     * @param key
     *            The key in the "CI" dictionary for the package member
     *
     * @throws Exception
     */
    private void print_property(IpCOS p, int doc, int number,
            String file_path, collection_field field, String key)
            throws Exception {
        print_formatted("Property number", "" + number);

        String value = null;

        switch (field.subtype) {
        case collection_field.TYPE_S:
            value = get_text_field(p, doc, file_path, key);
            break;

        case collection_field.TYPE_D:
            // we print dates as strings
            value = get_text_field(p, doc, file_path, key);
            break;

        case collection_field.TYPE_N:
            value = get_number_field(p, doc, file_path, key);
            break;

        case collection_field.TYPE_F:
            value = get_filename(p, doc, file_path);
            break;

        case collection_field.TYPE_DESC:
            value = get_description(p, doc, file_path);
            break;

        case collection_field.TYPE_MODDATE:
            value = get_moddate(p, doc, file_path);
            break;

        case collection_field.TYPE_CREATIONDATE:
            value = get_creationdate(p, doc, file_path);
            break;

        case collection_field.TYPE_SIZE:
            value = get_size(p, doc, file_path);
            break;
        }

        print_formatted("Display name", field.display_name);
        print_formatted("Value", value);
    }

    /**
     * Get the size of a package member.
     *
     * @param p
     *            An IpCOS object
     * @param doc
     *            A valid document handle
     * @param file_path
     *            The pCOS path for the package member
     *
     * @return The size of the package member in bytes as String
     *
     * @throws Exception
     */
    private String get_size(IpCOS p, int doc, String file_path)
            throws Exception {
        String size_path = file_path + "/EF/F/Params/Size";
        String retval = null;

        String objtype = p.pcos_get_string(doc, "type:" + size_path);
        if (objtype.equals("number")) {
            retval = "" + (int) p.pcos_get_number(doc, size_path);
        }

        return retval;
    }

    /**
     * Get the creation date of a package member.
     *
     * @param p
     *            An IpCOS object
     * @param doc
     *            A valid document handle
     * @param file_path
     *            The pCOS path for the package member
     *
     * @return The creation date in PDF format as String
     *
     * @throws Exception
     */
    private String get_creationdate(IpCOS p, int doc, String file_path)
            throws Exception {
        return get_date(p, doc, file_path + "/EF/F/Params/CreationDate");
    }
    /**
     * Get the modification date of a package member.
     *
     * @param p
     *            An IpCOS object
     * @param doc
     *            A valid document handle
     * @param file_path
     *            The pCOS path for the package member
     *
     * @return The modification date in PDF format as String
     *
     * @throws Exception
     */
    private String get_moddate(IpCOS p, int doc, String file_path)
            throws Exception {
        return get_date(p, doc, file_path + "/EF/F/Params/ModDate");
    }

    /**
     * Get the date from the given pCOS path.
     *
     * @param p
     *            An IpCOS object
     * @param doc
     *            A valid document handle
     * @param date_path
     *            The pCOS path for the desired date
     *
     * @return The date stored under the given pCOS path if it exists,
     *         otherwise null
     *
     * @throws Exception
     */
    private String get_date(IpCOS p, int doc, String date_path)
            throws Exception {
        String retval = null;

        String objtype = p.pcos_get_string(doc, "type:" + date_path);
        if (objtype.equals("string")) {
            retval = p.pcos_get_string(doc, date_path);
        }

        return retval;
    }

    /**
     * Get the description of a package member.
     *
     * @param p
     *            An IpCOS object
     * @param doc
     *            A valid document handle
     * @param file_path
     *            The pCOS path of the package member
     *
     * @return The description for the package member as String if it is
     *         available, otherwise null
     *
     * @throws Exception
     */
    private String get_description(IpCOS p, int doc, String file_path)
            throws Exception {
        String desc_path = file_path + "/Desc";
        String retval = null;

        String objtype = p.pcos_get_string(doc, "type:" + desc_path);
        if (objtype.equals("string")) {
            retval = p.pcos_get_string(doc, desc_path);
        }

        return retval;
    }

    /**
     * Get the filename of a package member. Only the "UF" and "F" entries
     * are examined.
     *
     * @param p
     *            An IpCOS object
     * @param doc
     *            A valid document handle
     * @param file_path
     *            The pCOS path of the package member
     *
     * @return The filename of the package member if it exists, otherwise null
     *
     * @throws Exception
     */
    private String get_filename(IpCOS p, int doc, String file_path)
            throws Exception {
        String fname_paths[] = { file_path + "/UF", file_path + "/F" };
        String retval = null;

        for (int i = 0; i < fname_paths.length && retval == null; i += 1) {
            String objtype = p.pcos_get_string(doc, "type:" + fname_paths[i]);
            if (objtype.equals("string")) {
                retval = p.pcos_get_string(doc, fname_paths[i]);
            }
        }

        return retval;
    }
    /**
     * Get a custom number field.
     *
     * @param p
     *            An IpCOS object
     * @param doc
     *            A valid document handle
     * @param file_path
     *            The pCOS path of the package member
     * @param key
     *            The key of the custom field in the collection item dictionary
     *
     * @return The number as String if it is available, otherwise null
     *
     * @throws Exception
     */
    private String get_number_field(IpCOS p, int doc, String file_path,
            String key) throws Exception {
        String result = null;
        String collection_item_path = file_path + "/CI/" + key;
        String objtype = p.pcos_get_string(doc,
            "type:" + collection_item_path);

        if (objtype.equals("number")) {
            result = "" + p.pcos_get_number(doc, collection_item_path);
        }
        else if (objtype.equals("dict")) {
            /*
             * A "collection subitem dictionary"
             */
            String data = "";
            String collection_subitem_data_path = collection_item_path + "/D";
            objtype = p.pcos_get_string(doc,
                "type:" + collection_subitem_data_path);
            if (objtype.equals("number")) {
                data = ""
                    + p.pcos_get_number(doc, collection_subitem_data_path);
            }

            String prefix = "";
            String collection_subitem_prefix_path =
                collection_item_path + "/P";
            objtype = p.pcos_get_string(doc,
                "type:" + collection_subitem_prefix_path);
            if (objtype.equals("string")) {
                prefix = p.pcos_get_string(doc,
                    collection_subitem_prefix_path);
            }

            result = prefix + data;
        }

        return result;
    }

    /**
     * Get a custom text field. This is also used for date fields.
     *
     * @param p
     *            An IpCOS object
     * @param doc
     *            A valid document handle
     * @param file_path
     *            The pCOS path of the package member
     * @param key
     *            The key of the custom field in the collection item dictionary
     *
     * @return The text/date as String if it is available, otherwise null
     *
     * @throws Exception
     */
    private String get_text_field(IpCOS p, int doc, String file_path,
            String key) throws Exception {
        String result = null;
        String collection_item_path = file_path + "/CI/" + key;
        String objtype = p.pcos_get_string(doc,
            "type:" + collection_item_path);

        if (objtype.equals("string")) {
            result = p.pcos_get_string(doc, collection_item_path);
        }
        else if (objtype.equals("dict")) {
            /*
             * A "collection subitem dictionary"
             */
            String data = "";
            String collection_subitem_data_path = collection_item_path + "/D";
            objtype = p.pcos_get_string(doc,
                "type:" + collection_subitem_data_path);
            if (objtype.equals("string")) {
                data = p.pcos_get_string(doc, collection_subitem_data_path);
            }

            String prefix = "";
            String collection_subitem_prefix_path =
                collection_item_path + "/P";
            objtype = p.pcos_get_string(doc,
                "type:" + collection_subitem_prefix_path);
            if (objtype.equals("string")) {
                prefix = p.pcos_get_string(doc,
                    collection_subitem_prefix_path);
            }

            result = prefix + data;
        }

        return result;
    }

    /**
     * Constant for aligning all the labels in the output.
     */
    private static final int DESC_WIDTH = 20;

    /**
     * Print label/value pairs, aligned using the {@link #DESC_WIDTH} constant.
     *
     * @param description
     *            The label
     * @param value
     *            The value
     */
    private void print_formatted(String description, String value) {
        StringBuffer b = new StringBuffer();

        for (int i = description.length(); i < DESC_WIDTH; i += 1) {
            b.append(' ');
        }

        b.append(description);
        b.append(": ");
        b.append(value != null ? value : "");

        System.out.println(b.toString());
    }

    /**
     * Complete a row by writing a newline character.
     *
     * @param csv_file
     *            The CSV file.
     */
    private void csv_row_complete(PrintWriter csv_file) {
        csv_file.println();
    }
    /**
     * Print a field to a new CSV column.
     *
     * @param csv_file
     *            The CSV file.
     * @param value
     *            The value to put into the column.
     */
    private void csv_column(PrintWriter csv_file, String value) {
        csv_file.print(CSV_SEPARATOR_CHAR);
        csv_file.print(csv_escape(value));
    }

    /**
     * Print out the first column of a row, containing the description.
     *
     * @param csv_file
     *            The {@link PrintWriter} object for producing the CSV file
     * @param string
     *            The contents of the first column
     */
    private void csv_row_description(PrintWriter csv_file, String string) {
        String escaped_string = csv_escape(string);
        csv_file.print(escaped_string);
    }

    /**
     * Escape the string according to the rules for CSV files.
     *
     * @param string
     *            The string to escape
     *
     * @return The string quoted according to the CSV rules.
     */
    private String csv_escape(String string) {
        StringBuffer buffer = new StringBuffer();

        if (string != null) {
            boolean must_quote = false;

            for (int i = 0; i < string.length(); i += 1) {
                char c = string.charAt(i);

                switch (c) {
                case '"':
                    // escape the double quote by doubling it, then fall
                    // through so the whole field gets quoted
                    buffer.append('"');

                case CSV_SEPARATOR_CHAR:
                case '\n':
                    must_quote = true;
                }

                buffer.append(c);
            }

            if (must_quote) {
                buffer.insert(0, '"');
                buffer.append('"');
            }
        }

        return buffer.toString();
    }

    /**
     * Store the description of a property in the given column of the
     * schema_header array.
     *
     * @param schema_header
     *            The target array
     * @param column
     *            The column to write to
     * @param field
     *            The field to store
     * @param key
     *            The key of a custom property
     */
    private void csv_schema_property(String[][] schema_header, int column,
            collection_field field, String key) {
        /*
         * Save the properties of the field, in the same order as they are
         * specified in the "row_descriptions" array in
         * "print_schema_properties".
         */
        int row = 0;
        schema_header[row++][column] = field.display_name;
        schema_header[row++][column] = key == null ? "built-in" : "custom";
        schema_header[row++][column] = type_names[field.subtype];
        schema_header[row++][column] =
            field.order != -1 ? "" + field.order : "";
        schema_header[row++][column] = field.visibility ? "true" : "false";
        schema_header[row++][column] = field.editable ? "true" : "false";
    }

    public package_properties(String[] argv, String readable_name,
            String search_path) {
        super(argv, readable_name, search_path);
    }

    public static void main(String argv[]) {
        package_properties example = new package_properties(argv,
            "Package properties", SEARCH_PATH);
        example.execute();
    }
}