/*
 * Copyright (C) 2010 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.clearsilver.jsilver.adaptor;

import com.google.clearsilver.jsilver.JSilverOptions;
import com.google.clearsilver.jsilver.data.Data;
import com.google.clearsilver.jsilver.data.DataFactory;
import com.google.clearsilver.jsilver.data.Parser;
import com.google.clearsilver.jsilver.exceptions.JSilverBadSyntaxException;

import org.clearsilver.CSFileLoader;
import org.clearsilver.HDF;

import java.io.FileWriter;
import java.io.IOException;
import java.io.StringReader;
import java.util.Date;
import java.util.TimeZone;

/**
 * Adaptor that wraps a JSilver Data object so it can be used as an HDF object.
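 *
 * <p>A minimal usage sketch. The {@code clearsilverFactory.newHdf()} construction step is an
 * assumption about the surrounding factory; the remaining calls are methods of this class:
 *
 * <pre>{@code
 *   HDF hdf = clearsilverFactory.newHdf();       // assumed factory call
 *   hdf.setValue("Page.Title", "Hello");
 *   String title = hdf.getValue("Page.Title", "none");
 *   hdf.close();                                 // releases the underlying Data tree
 * }</pre>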
 */
public class JHdf implements HDF {

  // Only changed to null on close()
  private Data data;
  private final DataFactory dataFactory;
  private final JSilverOptions options;

  private final LoadPathToFileCache loadPathCache;
  private ResourceLoaderAdaptor resourceLoader;

  JHdf(Data data, DataFactory dataFactory, LoadPathToFileCache loadPathCache,
      JSilverOptions options) {
    this.data = data;
    this.loadPathCache = loadPathCache;
    this.dataFactory = dataFactory;
    this.options = options;
    this.resourceLoader = new ResourceLoaderAdaptor(this, loadPathCache, null);
  }

  static JHdf cast(HDF hdf) {
    if (!(hdf instanceof JHdf)) {
      throw new IllegalArgumentException("HDF object not of type JHdf.  "
          + "Make sure you use the same ClearsilverFactory to construct all "
          + "related HDF and CS objects.");
    }
    return (JHdf) hdf;
  }

  Data getData() {
    return data;
  }

  ResourceLoaderAdaptor getResourceLoaderAdaptor() {
    return resourceLoader;
  }

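  /**
   * Drops the reference to the underlying Data tree. A hedged caller sketch (names are
   * illustrative): render first, then close in a finally block so the large tree becomes
   * collectible as soon as it is no longer needed:
   *
   * <pre>{@code
   *   JHdf hdf = ...;
   *   try {
   *     // populate and render with hdf
   *   } finally {
   *     hdf.close();
   *   }
   * }</pre>
   */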
  @Override
  public void close() {
    // This looks pointless but it actually reduces the lifetime of the large
    // Data object as far as the garbage collector is concerned and
    // dramatically improves performance.
    data = null;
  }

  @Override
  public boolean readFile(String filename) throws IOException {
    dataFactory.loadData(filename, resourceLoader, data);
    return false;
  }

  @Override
  public CSFileLoader getFileLoader() {
    return resourceLoader.getCSFileLoader();
  }

  @Override
  public void setFileLoader(CSFileLoader fileLoader) {
    this.resourceLoader = new ResourceLoaderAdaptor(this, loadPathCache, fileLoader);
  }

  @Override
  public boolean writeFile(String filename) throws IOException {
    FileWriter writer = new FileWriter(filename);
    try {
      data.write(writer, 2);
    } finally {
      writer.close();
    }
    return true;
  }

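  /**
   * Parses HDF-syntax text from a string into this node. A small illustrative input (standard
   * ClearSilver HDF syntax; the names are made up):
   *
   * <pre>{@code
   *   Page.Title = Hello
   *   Page {
   *     Count = 3
   *   }
   * }</pre>
   *
   * Syntax errors surface as JSilverBadSyntaxException; false is returned only if an
   * IOException occurs while reading the string.
   */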
  @Override
  public boolean readString(String content) {
    Parser hdfParser = dataFactory.getParser();
    try {
      hdfParser.parse(new StringReader(content), data, new Parser.ErrorHandler() {
        public void error(int line, String lineContent, String fileName, String errorMessage) {
          throw new JSilverBadSyntaxException("HDF parsing error : '" + errorMessage + "'",
              lineContent, fileName, line, JSilverBadSyntaxException.UNKNOWN_POSITION, null);
        }
      }, resourceLoader, null, options.getIgnoreAttributes());
      return true;
    } catch (IOException e) {
      return false;
    }
  }

  @Override
  public int getIntValue(String hdfName, int defaultValue) {
    return data.getIntValue(hdfName, defaultValue);
  }

  @Override
  public String getValue(String hdfName, String defaultValue) {
    return data.getValue(hdfName, defaultValue);
  }

  @Override
  public void setValue(String hdfName, String value) {
    data.setValue(hdfName, value);
  }

  @Override
  public void removeTree(String hdfName) {
    data.removeTree(hdfName);
  }

  @Override
  public void setSymLink(String hdfNameSrc, String hdfNameDest) {
    data.setSymlink(hdfNameSrc, hdfNameDest);
  }

  @Override
  public void exportDate(String hdfName, TimeZone timeZone, Date date) {
    throw new UnsupportedOperationException("TBD");
  }

  @Override
  public void exportDate(String hdfName, String tz, int tt) {
    throw new UnsupportedOperationException("TBD");
  }

  @Override
  public HDF getObj(String hdfpath) {
    Data d = data.getChild(hdfpath);
    return d == null ? null : new JHdf(d, dataFactory, loadPathCache, options);
  }

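  /**
   * Unlike {@link #getObj(String)}, which wraps the node at {@code hdfpath} itself, this returns
   * a wrapper around that node's first child, or null if the node is missing or has no children.
   */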
  @Override
  public HDF getChild(String hdfpath) {
    Data d = data.getChild(hdfpath);
    if (d == null) {
      return null;
    }
    for (Data child : d.getChildren()) {
      if (child.isFirstSibling()) {
        return new JHdf(child, dataFactory, loadPathCache, options);
      } else {
        // The first child returned should be the first sibling. Throw an error
        // if not.
        throw new IllegalStateException("First child was not first sibling.");
      }
    }
    return null;
  }

  @Override
  public HDF getRootObj() {
    Data root = data.getRoot();
    if (root == data) {
      return this;
    } else {
      return new JHdf(root, dataFactory, loadPathCache, options);
    }
  }

  @Override
  public boolean belongsToSameRoot(HDF hdf) {
    JHdf jHdf = cast(hdf);
    return this.data.getRoot() == jHdf.data.getRoot();
  }

  @Override
  public HDF getOrCreateObj(String hdfpath) {
    return new JHdf(data.createChild(hdfpath), dataFactory, loadPathCache, options);
  }

  @Override
  public String objName() {
    return data.getName();
  }

  @Override
  public String objValue() {
    return data.getValue();
  }

  @Override
  public HDF objChild() {
    for (Data child : data.getChildren()) {
      if (child.isFirstSibling()) {
        return new JHdf(child, dataFactory, loadPathCache, options);
      }
    }
    return null;
  }

  @Override
  public HDF objNext() {
    Data next = data.getNextSibling();
    return next == null ? null : new JHdf(next, dataFactory, loadPathCache, options);
  }

  @Override
  public void copy(String hdfpath, HDF src) {
    JHdf srcJHdf = cast(src);
    if (hdfpath.equals("")) {
      data.copy(srcJHdf.data);
    } else {
      data.copy(hdfpath, srcJHdf.data);
    }
  }

  @Override
  public String dump() {
    StringBuilder sb = new StringBuilder();
    try {
      data.write(sb, 0);
      return sb.toString();
    } catch (IOException e) {
      return null;
    }
  }

  @Override
  public String writeString() {
    return dump();
  }

  @Override
  public String toString() {
    return dump();
  }

  /**
   * JSilver-specific method that optimizes the underlying data object. Should only be used on
   * long-lived HDF objects (e.g. global HDF).
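   *
   * <p>A hedged example of the intended pattern (the variable name is illustrative): populate a
   * shared HDF once, call {@code globalHdf.optimize()}, and only then reuse it across many
   * renders.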
   */
  public void optimize() {
    data.optimize();
  }
}