/*-
 *******************************************************************************
 * Copyright (c) 2011, 2016 Diamond Light Source Ltd.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *    Peter Chang - initial API and implementation and/or initial documentation
 *******************************************************************************/

package org.eclipse.january.dataset;

import java.io.Serializable;
import java.lang.annotation.Annotation;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.eclipse.january.DatasetException;
import org.eclipse.january.MetadataException;
import org.eclipse.january.metadata.Dirtiable;
import org.eclipse.january.metadata.ErrorMetadata;
import org.eclipse.january.metadata.IMetadata;
import org.eclipse.january.metadata.MetadataFactory;
import org.eclipse.january.metadata.MetadataType;
import org.eclipse.january.metadata.Reshapeable;
import org.eclipse.january.metadata.Sliceable;
import org.eclipse.january.metadata.Transposable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Common base for both lazy and normal dataset implementations
 */
public abstract class LazyDatasetBase implements ILazyDataset, Serializable {

	private static final long serialVersionUID = 767926846438976050L;

	private static final Logger logger = LoggerFactory.getLogger(LazyDatasetBase.class);

	transient private boolean dirty = true; // indicate dirty state of metadata
	protected String name = "";

	/**
	 * The shape or dimensions of the dataset
	 */
	protected int[] shape;

	protected ConcurrentMap<Class<? extends MetadataType>, List<MetadataType>> metadata = null;

	/**
	 * @return type of dataset item
	 */
	abstract public int getDType();

	@Override
	public LazyDatasetBase clone() {
		return null;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (obj == null) {
			return false;
		}
		if (!getClass().equals(obj.getClass())) {
			return false;
		}

		LazyDatasetBase other = (LazyDatasetBase) obj;
		if (getDType() != other.getDType()) {
			return false;
		}
		if (getElementsPerItem() != other.getElementsPerItem()) {
			return false;
		}
		if (!Arrays.equals(shape, other.shape)) {
			return false;
		}
		return true;
	}

	@Override
	public int hashCode() {
		int hash = getDType() * 17 + getElementsPerItem();
		int rank = shape.length;
		for (int i = 0; i < rank; i++) {
			hash = hash * 17 + shape[i];
		}
		return hash;
	}

	@Override
	public String getName() {
		return name;
	}

	@Override
	public void setName(String name) {
		this.name = name;
	}

	@Override
	public int[] getShape() {
		return shape.clone();
	}

	@Override
	public int getRank() {
		return shape.length;
	}

	/**
	 * This method allows anything that dirties the dataset to clear various metadata values
	 * so that the other methods can work correctly.
	 * @since 2.1
	 */
	public void setDirty() {
		dirty = true;
	}

	/**
	 * Check if slice is compatible with dataset's shape
	 * @param slice to check
	 */
	protected void checkSliceND(SliceND slice) {
		slice.checkShapes(shape, null);
	}

	/**
	 * Find first sub-interface of (or class that directly implements) MetadataType
	 * @param clazz metadata type
	 * @return sub-interface
	 * @exception IllegalArgumentException when given class is {@link MetadataType} or an anonymous sub-class of it
	 */
	@SuppressWarnings("unchecked")
	public static Class<? extends MetadataType> findMetadataTypeSubInterfaces(Class<? extends MetadataType> clazz) {
		if (clazz.equals(MetadataType.class)) {
			throw new IllegalArgumentException("Cannot accept MetadataType");
		}

		if (clazz.isInterface()) {
			return clazz;
		}

		if (clazz.isAnonymousClass()) { // special case
			Class<?> s = clazz.getSuperclass();
			if (!s.equals(Object.class)) {
				// only use super class if it is not an anonymous class of an interface
				clazz = (Class<? extends MetadataType>) s;
			}
		}

		for (Class<?> c : clazz.getInterfaces()) {
			if (c.equals(MetadataType.class)) {
				if (clazz.isAnonymousClass()) {
					throw new IllegalArgumentException("Cannot accept anonymous subclasses of MetadataType");
				}
				return clazz;
			}
			if (MetadataType.class.isAssignableFrom(c)) {
				return (Class<? extends MetadataType>) c;
			}
		}

		Class<?> c = clazz.getSuperclass(); // Naughty: someone has sub-classed a metadata class
		if (c != null) {
			return findMetadataTypeSubInterfaces((Class<? extends MetadataType>) c);
		}

		logger.error("Somehow the search for metadata type interface ended in a bad place");
		assert false; // should not be able to get here!!!
		return null;
	}
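
	/*
	 * Illustrative sketch only (the names below are hypothetical, not part of this file): metadata
	 * is keyed by the first MetadataType sub-interface that the concrete class implements, so given
	 *
	 *     interface ExampleMetadata extends MetadataType { ... }
	 *     class ExampleMetadataImpl implements ExampleMetadata { ... }
	 *
	 * findMetadataTypeSubInterfaces(ExampleMetadataImpl.class) is expected to return
	 * ExampleMetadata.class, and that interface is then used as the key in the metadata map
	 * manipulated by the methods below.
	 */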

	@Override
	public void setMetadata(MetadataType metadata) {
		addMetadata(metadata, true);
	}

	@Override
	public void addMetadata(MetadataType metadata) {
		addMetadata(metadata, false);
	}

	private synchronized void addMetadata(MetadataType metadata, boolean clear) {
		if (metadata == null) {
			return;
		}

		if (this.metadata == null) {
			this.metadata = new ConcurrentHashMap<Class<? extends MetadataType>, List<MetadataType>>();
		}

		Class<? extends MetadataType> clazz = findMetadataTypeSubInterfaces(metadata.getClass());
		if (!this.metadata.containsKey(clazz)) {
			this.metadata.put(clazz, new ArrayList<MetadataType>());
		} else if (clear) {
			this.metadata.get(clazz).clear();
		}
		this.metadata.get(clazz).add(metadata);

		// add for special case of sub-interfaces of IMetadata
		if (!IMetadata.class.equals(clazz) && IMetadata.class.isAssignableFrom(clazz)) {
			clazz = IMetadata.class;
			if (!this.metadata.containsKey(clazz)) {
				this.metadata.put(clazz, new ArrayList<MetadataType>());
			} else if (clear) {
				this.metadata.get(clazz).clear();
			}
			this.metadata.get(clazz).add(metadata);
		}
	}

	@Override
	@Deprecated
	public synchronized IMetadata getMetadata() {
		return getFirstMetadata(IMetadata.class);
	}

	@SuppressWarnings("unchecked")
	@Override
	public synchronized <T extends MetadataType> List<T> getMetadata(Class<T> clazz) throws MetadataException {
		if (metadata == null) {
			dirty = false;
			return null;
		}

		if (dirty) {
			dirtyMetadata();
			dirty = false;
		}

		if (clazz == null) {
			List<T> all = new ArrayList<>();
			for (Class<? extends MetadataType> c : metadata.keySet()) {
				all.addAll((Collection<T>) metadata.get(c));
			}
			return all;
		}

		return (List<T>) metadata.get(findMetadataTypeSubInterfaces(clazz));
	}

	@Override
	public synchronized <T extends MetadataType> T getFirstMetadata(Class<T> clazz) {
		try {
			List<T> ml = getMetadata(clazz);
			if (ml == null) {
				return null;
			}
			for (T t : ml) {
				if (clazz.isInstance(t)) {
					return t;
				}
			}
		} catch (Exception e) {
			logger.error("Get metadata failed!", e);
		}

		return null;
	}

	@Override
	public synchronized void clearMetadata(Class<? extends MetadataType> clazz) {
		if (metadata == null) {
			return;
		}

		if (clazz == null) {
			metadata.clear();
			return;
		}

		List<MetadataType> list = metadata.get(findMetadataTypeSubInterfaces(clazz));
		if (list != null) {
			list.clear();
		}
	}

	/**
	 * @return copy of metadata
	 * @since 2.0
	 */
	protected synchronized ConcurrentMap<Class<? extends MetadataType>, List<MetadataType>> copyMetadata() {
		return copyMetadata(metadata);
	}

	/**
	 * @param metadata map of metadata to copy
	 * @return copy of metadata of given type
	 * @since 2.0
	 */
	protected static ConcurrentMap<Class<? extends MetadataType>, List<MetadataType>> copyMetadata(Map<Class<? extends MetadataType>, List<MetadataType>> metadata) {
		if (metadata == null) {
			return null;
		}

		ConcurrentHashMap<Class<? extends MetadataType>, List<MetadataType>> map = new ConcurrentHashMap<Class<? extends MetadataType>, List<MetadataType>>();
		copyMetadata(metadata, map);
		return map;
	}

	private static void copyMetadata(Map<Class<? extends MetadataType>, List<MetadataType>> inMetadata,
			Map<Class<? extends MetadataType>, List<MetadataType>> outMetadata) {
		for (Class<? extends MetadataType> c : inMetadata.keySet()) {
			List<MetadataType> l = inMetadata.get(c);
			List<MetadataType> nl = new ArrayList<MetadataType>(l.size());
			outMetadata.put(c, nl);
			for (MetadataType m : l) {
				if (m == null || isMetadataDirty(m)) { // skip dirty metadata
					continue;
				}
				nl.add(m.clone());
			}
		}
	}

	protected void restoreMetadata(Map<Class<? extends MetadataType>, List<MetadataType>> oldMetadata) {
		copyMetadata(oldMetadata, metadata);
	}

	/**
	 * @param a dataset
	 * @param clone if true, copy metadata
	 * @return copy of metadata
	 * @since 2.2
	 */
	protected static ConcurrentMap<Class<? extends MetadataType>, List<MetadataType>> getMetadataMap(ILazyDataset a, boolean clone) {
		List<MetadataType> all = null;
		try {
			all = a.getMetadata(null);
		} catch (Exception e) {
		}
		if (all == null) {
			return null;
		}

		ConcurrentMap<Class<? extends MetadataType>, List<MetadataType>> map = new ConcurrentHashMap<Class<? extends MetadataType>, List<MetadataType>>();

		for (MetadataType m : all) {
			if (m == null || isMetadataDirty(m)) { // skip dirty metadata
				continue;
			}
			Class<? extends MetadataType> c = findMetadataTypeSubInterfaces(m.getClass());
			List<MetadataType> l = map.get(c);
			if (l == null) {
				l = new ArrayList<MetadataType>();
				map.put(c, l);
			}
			if (clone) {
				m = m.clone();
			}
			l.add(m);
		}
		return map;
	}

	private static boolean isMetadataDirty(MetadataType m) {
		Class<? extends MetadataType> c = m.getClass();
		for (Field f : c.getDeclaredFields()) {
			if (f.isAnnotationPresent(Dirtiable.class)) {
				Class<?> t = f.getType();
				if (t.equals(boolean.class) || t.equals(Boolean.class)) {
					try {
						f.setAccessible(true);
						Object o = f.get(m);
						if (o.equals(true)) {
							return true;
						}
					} catch (Exception e) {
						logger.debug("Could not retrieve value of dirty variable: {}", c.getCanonicalName(), e);
					}
				}
			}
		}

		return false;
	}

	interface MetadatasetAnnotationOperation {
		/**
		 * Process value of given field
		 * <p>
		 * When the field is not a container then the returned value
		 * may replace the old value
		 * @param f given field
		 * @param o value of field
		 * @return transformed field value
		 */
		Object processField(Field f, Object o);

		/**
		 * @return annotation class
		 */
		Class<? extends Annotation> getAnnClass();

		/**
		 * @param axis axis index
		 * @return number of dimensions to insert or remove
		 */
		int change(int axis);

		/**
		 * @return new rank, or -1 to match the existing rank
		 */
		int getNewRank();

		/**
		 * Run operation on given lazy dataset
		 * @param lz lazy dataset
		 * @return processed dataset
		 */
		ILazyDataset run(ILazyDataset lz);
	}

	class MdsSlice implements MetadatasetAnnotationOperation {
		private boolean asView;
		private SliceND slice;
		private int[] oShape;
		private long oSize;

		public MdsSlice(boolean asView, SliceND slice) {
			this.asView = asView;
			this.slice = slice;
			oShape = slice.getSourceShape();
			oSize = ShapeUtils.calcLongSize(oShape);
		}

		@Override
		public Object processField(Field field, Object o) {
			return o;
		}

		@Override
		public Class<? extends Annotation> getAnnClass() {
			return Sliceable.class;
		}

		@Override
		public int change(int axis) {
			return 0;
		}

		@Override
		public int getNewRank() {
			return -1;
		}

		@Override
		public ILazyDataset run(ILazyDataset lz) {
			int rank = lz.getRank();
			if (slice.getStart().length != rank) {
				throw new IllegalArgumentException("Slice rank does not match dataset!");
			}

			int[] shape = lz.getShape();
			SliceND nslice;
			if (!ShapeUtils.areShapesBroadcastCompatible(oShape, shape)) {
				nslice = new SliceND(shape);
				for (int i = 0; i < rank; i++) {
					int s = shape[i];
					int os = oShape[i];
					if (s >= os) {
						nslice.setSlice(i, 0, os, 1);
					} else if (s == 1) {
						nslice.setSlice(i, 0, 1, 1);
					} else {
						throw new IllegalArgumentException("Sliceable dataset has non-unit dimension less than host!");
					}
				}
				lz = lz.getSliceView(nslice);
				shape = nslice.getShape();
			}
			if (lz.getSize() == oSize && Arrays.equals(shape, oShape)) {
				nslice = slice;
			} else {
				nslice = slice.clone();
				for (int i = 0; i < rank; i++) {
					int s = shape[i];
					if (s >= oShape[i]) {
						continue;
					} else if (s == 1) {
						nslice.setSlice(i, 0, 1, 1);
					} else {
						throw new IllegalArgumentException("Sliceable dataset has non-unit dimension less than host!");
					}
				}
				nslice.updateSourceShape(shape);
			}

			if (asView || (lz instanceof IDataset)) {
				return lz.getSliceView(nslice);
			}
			try {
				return lz.getSlice(nslice);
			} catch (DatasetException e) {
				logger.error("Could not slice dataset in metadata", e);
				return null;
			}
		}
	}

	class MdsReshape implements MetadatasetAnnotationOperation {
		private boolean matchRank;
		private int[] oldShape;
		private int[] newShape;
		boolean onesOnly;
		int[] differences;

		/*
		 * if only ones then record differences (insertions and deletions)
		 *
		 * if shape changing, find broadcasted dimensions and disallow
		 * merging that includes those dimensions
		 */
		public MdsReshape(final int[] oldShape, final int[] newShape) {
			this.oldShape = oldShape;
			this.newShape = newShape;
			differences = null;
		}

		@Override
		public Object processField(Field field, Object o) {
			Annotation a = field.getAnnotation(Reshapeable.class);
			if (a != null) { // cannot be null
				matchRank = ((Reshapeable) a).matchRank();
			}
			return o;
		}

		@Override
		public Class<? extends Annotation> getAnnClass() {
			return Reshapeable.class;
		}

		@Override
		public int change(int axis) {
			if (matchRank) {
				if (differences == null) {
					init();
				}

				if (onesOnly) {
					return differences == null ? 0 : differences[axis];
				}
				throw new UnsupportedOperationException("TODO support other shape operations");
			}
			return 0;
		}

		@Override
		public int getNewRank() {
			return matchRank ? newShape.length : -1;
		}

		private void init() {
			int or = oldShape.length - 1;
			int nr = newShape.length - 1;
			if (or < 0 || nr < 0) { // zero-rank shapes
				onesOnly = true;
				differences = new int[1];
				differences[0] = or < 0 ? nr + 1 : or + 1;
				return;
			}
			onesOnly = ShapeUtils.differsByOnes(oldShape, newShape);
			int ob = 0;
			int nb = 0;
			if (onesOnly) {
				differences = ShapeUtils.calcShapePadding(oldShape, newShape);
			} else {
				differences = new int[or + 2];
				if (matchRank) {
					logger.error("Combining dimensions is currently not supported");
					throw new IllegalArgumentException("Combining dimensions is currently not supported");
				}
				// work out mapping: contiguous dimensions can be grouped or split
				while (ob <= or && nb <= nr) {
					int ol = oldShape[ob];
					while (ol == 1 && ol <= or) {
						ob++;
						ol = oldShape[ob];
					}
					int oe = ob + 1;
					int nl = newShape[nb];
					while (nl == 1 && nl <= nr) {
						nb++;
						nl = newShape[nb];
					}
					int ne = nb + 1;
					if (ol < nl) {
						differences[ob] = 1;
						do { // case where new shape combines several dimensions into one dimension
							if (oe == (or + 1)) {
								break;
							}
							differences[oe] = 1;
							ol *= oldShape[oe++];
						} while (ol < nl);
						differences[oe - 1] = oe - ob; // signal end with difference
						if (nl != ol) {
							logger.error("Single dimension is incompatible with subshape");
							throw new IllegalArgumentException("Single dimension is incompatible with subshape");
						}
					} else if (ol > nl) {
						do { // case where new shape spreads single dimension over several dimensions
							if (ne == (nr + 1)) {
								break;
							}
							nl *= newShape[ne++];
						} while (nl < ol);
						if (nl != ol) {
							logger.error("Subshape is incompatible with single dimension");
							throw new IllegalArgumentException("Subshape is incompatible with single dimension");
						}
					}

					ob = oe;
					nb = ne;
				}
			}
		}

		@Override
		public ILazyDataset run(ILazyDataset lz) {
			if (differences == null) {
				init();
			}

			int[] lshape = lz.getShape();
			if (Arrays.equals(newShape, lshape)) {
				return lz;
			}
			int or = lshape.length;
			int nr = newShape.length;
			int[] nshape;
			if (onesOnly) {
				nshape = ShapeUtils.padShape(differences, nr, lshape);
			} else {
				nshape = new int[nr];
				boolean[] broadcast = new boolean[or];
				for (int ob = 0; ob < or; ob++) {
					broadcast[ob] = oldShape[ob] != 1 && lshape[ob] == 1;
				}
				int osize = lz.getSize();

				// cannot do 3x5x... to 15x... if metadata is broadcasting (i.e. 1x5x...)
				int ob = 0;
				int nsize = 1;
				for (int i = 0; i < nr; i++) {
					if (ob < or && broadcast[ob]) {
						if (differences[ob] != 0) {
							logger.error("Metadata contains a broadcast axis which cannot be reshaped");
							throw new IllegalArgumentException("Metadata contains a broadcast axis which cannot be reshaped");
						}
						nshape[i] = 1;
					} else {
						nshape[i] = nsize < osize ? newShape[i] : 1;
					}
					nsize *= nshape[i];
					ob++;
				}
			}

			ILazyDataset nlz;
			if (lz instanceof Dataset) {
				nlz = ((Dataset) lz).reshape(nshape);
			} else {
				nlz = lz.getSliceView();
				nlz.setShape(nshape);
			}
			return nlz;
		}
	}

	class MdsTranspose implements MetadatasetAnnotationOperation {
		int[] map;

		public MdsTranspose(final int[] axesMap) {
			map = axesMap;
		}

		@SuppressWarnings({ "rawtypes", "unchecked" })
		@Override
		public Object processField(Field f, Object o) {
			// reorder arrays and lists according to the axes map
			if (o.getClass().isArray()) {
				int l = Array.getLength(o);
				if (l == map.length) {
					Object narray = Array.newInstance(o.getClass().getComponentType(), l);
					for (int i = 0; i < l; i++) {
						Array.set(narray, i, Array.get(o, map[i]));
					}
					for (int i = 0; i < l; i++) {
						Array.set(o, i, Array.get(narray, i));
					}
				}
			} else if (o instanceof List<?>) {
				List list = (List) o;
				int l = list.size();
				if (l == map.length) {
					Object[] narray = new Object[l]; // lists have no component type so copy via an Object array
					for (int i = 0; i < l; i++) {
						narray[i] = list.get(map[i]);
					}
					list.clear();
					for (int i = 0; i < l; i++) {
						list.add(narray[i]);
					}
				}
			}
			return o;
		}

		@Override
		public Class<? extends Annotation> getAnnClass() {
			return Transposable.class;
		}

		@Override
		public int change(int axis) {
			return 0;
		}

		@Override
		public int getNewRank() {
			return -1;
		}

		@Override
		public ILazyDataset run(ILazyDataset lz) {
			return lz.getTransposedView(map);
		}
	}

	class MdsDirty implements MetadatasetAnnotationOperation {

		@Override
		public Object processField(Field f, Object o) {
			// throw exception if not boolean???
			Class<?> t = f.getType();
			if (t.equals(boolean.class) || t.equals(Boolean.class)) {
				if (o.equals(false)) {
					o = true;
				}
			}
			return o;
		}

		@Override
		public Class<? extends Annotation> getAnnClass() {
			return Dirtiable.class;
		}

		@Override
		public int change(int axis) {
			return 0;
		}

		@Override
		public int getNewRank() {
			return -1;
		}

		@Override
		public ILazyDataset run(ILazyDataset lz) {
			return lz;
		}
	}

	/**
	 * Slice all datasets in metadata that are annotated by @Sliceable. Call this on the new sliced
	 * dataset after cloning the metadata
	 * @param asView if true then just a view
	 * @param slice an n-D slice
	 */
	protected void sliceMetadata(boolean asView, final SliceND slice) {
		processAnnotatedMetadata(new MdsSlice(asView, slice));
	}

	/**
	 * Reshape all datasets in metadata that are annotated by @Reshapeable. Call this when squeezing
	 * or setting the shape
	 * @param oldShape old shape
	 * @param newShape new shape
	 */
	protected void reshapeMetadata(final int[] oldShape, final int[] newShape) {
		processAnnotatedMetadata(new MdsReshape(oldShape, newShape));
	}
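
	/*
	 * Illustrative sketch only (hypothetical metadata class, not part of this API): the slice,
	 * reshape, transpose and dirty helpers act on metadata fields through the annotations, e.g.
	 *
	 *     class ExampleMetadata implements MetadataType {
	 *         @Sliceable
	 *         ILazyDataset axis;   // sliced along with the host dataset
	 *         @Transposable
	 *         int[] order;         // reordered when the host is transposed
	 *         @Dirtiable
	 *         boolean dirty;       // set true when the host is modified
	 *     }
	 *
	 * so a call such as sliceMetadata(true, slice) would replace the "axis" field with a
	 * correspondingly sliced view.
	 */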

	/**
	 * Transpose all datasets in metadata that are annotated by @Transposable. Call this on the transposed
	 * dataset after cloning the metadata
	 * @param axesMap if zero length then axes order reversed
	 */
	protected void transposeMetadata(final int[] axesMap) {
		processAnnotatedMetadata(new MdsTranspose(axesMap));
	}

	/**
	 * Dirty metadata that are annotated by @Dirtiable. Call this when the dataset has been modified
	 * @since 2.0
	 */
	protected void dirtyMetadata() {
		processAnnotatedMetadata(new MdsDirty());
	}

	@SuppressWarnings("unchecked")
	private void processAnnotatedMetadata(MetadatasetAnnotationOperation op) {
		if (metadata == null)
			return;

		for (List<MetadataType> l : metadata.values()) {
			for (MetadataType m : l) {
				if (m == null) {
					continue;
				}

				Class<? extends MetadataType> mc = m.getClass();
				do { // iterate over super-classes
					processClass(op, m, mc);
					Class<?> sclazz = mc.getSuperclass();
					if (!MetadataType.class.isAssignableFrom(sclazz)) {
						break;
					}
					mc = (Class<? extends MetadataType>) sclazz;
				} while (true);
			}
		}
	}
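
	/*
	 * Editorial summary, derived from the method below: processClass inspects each declared field
	 * of the given metadata class that carries the operation's annotation. Lazy dataset fields are
	 * replaced by the result of op.run(); arrays, lists and maps are traversed element-wise, with
	 * op.change() used to insert or drop entries when the rank of the host dataset changes.
	 */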

	@SuppressWarnings({ "unchecked", "rawtypes" })
	private static void processClass(MetadatasetAnnotationOperation op, MetadataType m, Class<? extends MetadataType> mc) {
		for (Field f : mc.getDeclaredFields()) {
			if (!f.isAnnotationPresent(op.getAnnClass()))
				continue;

			try {
				f.setAccessible(true);
				Object o = f.get(m);
				if (o == null) {
					continue;
				}

				Object no = op.processField(f, o);
				if (no != o) {
					f.set(m, no);
					continue;
				}
				Object r = null;
				if (o instanceof ILazyDataset) {
					try {
						f.set(m, op.run((ILazyDataset) o));
					} catch (Exception e) {
						logger.error("Problem processing " + o, e);
						throw e;
					}
				} else if (o.getClass().isArray()) {
					int l = Array.getLength(o);

					for (int i = 0; r == null && i < l; i++) {
						r = Array.get(o, i);
					}
					int n = op.getNewRank();
					if (r == null) {
						if (n < 0 || n != l) { // all nulls; need to match rank as necessary
							f.set(m, Array.newInstance(o.getClass().getComponentType(), n < 0 ? l : n));
						}
						continue;
					}
					if (n < 0) {
						n = l;
					}
					Object narray = Array.newInstance(r.getClass(), n);
					for (int i = 0, si = 0, di = 0; di < n && si < l; i++) {
						int c = op.change(i);
						if (c == 0) {
							Array.set(narray, di++, processObject(op, Array.get(o, si++)));
						} else if (c > 0) {
							di += c; // add nulls by skipping forward in destination array
						} else if (c < 0) {
							si -= c; // remove dimensions by skipping forward in source array
						}
					}
					if (n == l) {
						for (int i = 0; i < l; i++) {
							Array.set(o, i, Array.get(narray, i));
						}
					} else {
						f.set(m, narray);
					}
				} else if (o instanceof List<?>) {
					List list = (List) o;
					int l = list.size();

					for (int i = 0; r == null && i < l; i++) {
						r = list.get(i);
					}
					int n = op.getNewRank();
					if (r == null) {
						if (n < 0 || n != l) { // all nulls; need to match rank as necessary
							list.clear();
							for (int i = 0, imax = n < 0 ? l : n; i < imax; i++) {
								list.add(null);
							}
						}
						continue;
					}

					if (n < 0) {
						n = l;
					}
					Object narray = Array.newInstance(r.getClass(), n);
					for (int i = 0, si = 0, di = 0; i < l && si < l; i++) {
						int c = op.change(i);
						if (c == 0) {
							Array.set(narray, di++, processObject(op, list.get(si++)));
						} else if (c > 0) {
							di += c; // add nulls by skipping forward in destination array
						} else if (c < 0) {
							si -= c; // remove dimensions by skipping forward in source array
						}
					}
					list.clear();
					for (int i = 0; i < n; i++) {
						list.add(Array.get(narray, i));
					}
				} else if (o instanceof Map<?, ?>) {
					Map map = (Map) o;
					for (Object k : map.keySet()) {
						map.put(k, processObject(op, map.get(k)));
					}
				}
			} catch (Exception e) {
				logger.error("Problem occurred when processing metadata of class {}", mc.getCanonicalName(), e);
				throw new RuntimeException(e);
			}
		}
	}

	@SuppressWarnings({ "unchecked", "rawtypes" })
	private static Object processObject(MetadatasetAnnotationOperation op, Object o) throws Exception {
		if (o == null) {
			return o;
		}

		if (o instanceof ILazyDataset) {
			try {
				return op.run((ILazyDataset) o);
			} catch (Exception e) {
				logger.error("Problem processing " + o, e);
				throw e;
			}
		} else if (o.getClass().isArray()) {
			int l = Array.getLength(o);
			for (int i = 0; i < l; i++) {
				Array.set(o, i, processObject(op, Array.get(o, i)));
			}
		} else if (o instanceof List<?>) {
			List list = (List) o;
			for (int i = 0, imax = list.size(); i < imax; i++) {
				list.set(i, processObject(op, list.get(i)));
			}
		} else if (o instanceof Map<?, ?>) {
			Map map = (Map) o;
			for (Object k : map.keySet()) {
				map.put(k, processObject(op, map.get(k)));
			}
		}
		return o;
	}
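
	/*
	 * Illustrative note, not a contract: createFromSerializable below accepts anything that
	 * DatasetFactory.createFromObject understands, e.g. a single number, a primitive array or
	 * another dataset, and adjusts its shape so it broadcasts against this dataset. So a call
	 * such as setErrors(Double.valueOf(0.5)) would be expected to attach a constant error to
	 * every item, assuming the value can be broadcast to the host shape.
	 */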

	protected ILazyDataset createFromSerializable(Serializable blob, boolean keepLazy) {
		ILazyDataset d = null;
		if (blob instanceof ILazyDataset) {
			d = (ILazyDataset) blob;
			if (d instanceof IDataset) {
				Dataset ed = DatasetUtils.convertToDataset((IDataset) d);
				int is = ed.getElementsPerItem();
				if (is != 1 && is != getElementsPerItem()) {
					throw new IllegalArgumentException("Dataset has incompatible number of elements with this dataset");
				}
				d = ed.cast(is == 1 ? DoubleDataset.class : CompoundDoubleDataset.class);
			} else if (!keepLazy) {
				final int is = getElementsPerItem();
				try {
					d = DatasetUtils.cast(is == 1 ? DoubleDataset.class : CompoundDoubleDataset.class, d.getSlice());
				} catch (DatasetException e) {
					logger.error("Could not get data from lazy dataset", e);
					return null;
				}
			}
		} else {
			final int is = getElementsPerItem();
			if (is == 1) {
				d = DatasetFactory.createFromObject(DoubleDataset.class, blob);
			} else {
				try {
					d = DatasetFactory.createFromObject(is, CompoundDoubleDataset.class, blob);
				} catch (IllegalArgumentException e) { // if only single value supplied try again
					d = DatasetFactory.createFromObject(DoubleDataset.class, blob);
				}
			}
			if (d.getSize() == getSize() && !Arrays.equals(d.getShape(), shape)) {
				d.setShape(shape.clone());
			}
		}
		List<int[]> s = BroadcastUtils.broadcastShapesToMax(shape, d.getShape());
		d.setShape(s.get(0));

		return d;
	}

	@Override
	public void setErrors(Serializable errors) {
		if (shape == null) {
			throw new IllegalArgumentException("Cannot set errors for null dataset");
		}
		if (errors == null) {
			clearMetadata(ErrorMetadata.class);
			return;
		}
		if (errors == this) {
			logger.warn("Ignoring setting error to itself as this will lead to infinite recursion");
			return;
		}

		ILazyDataset errorData = createFromSerializable(errors, true);

		ErrorMetadata emd = getErrorMetadata();
		if (emd == null) {
			try {
				emd = MetadataFactory.createMetadata(ErrorMetadata.class);
				setMetadata(emd);
			} catch (MetadataException me) {
				logger.error("Could not create metadata", me);
			}
		}
		emd.setError(errorData);
	}

	protected ErrorMetadata getErrorMetadata() {
		try {
			List<ErrorMetadata> el = getMetadata(ErrorMetadata.class);
			if (el != null && !el.isEmpty()) {
				return el.get(0);
			}
		} catch (Exception e) {
		}
		return null;
	}

	@Override
	public ILazyDataset getErrors() {
		ErrorMetadata emd = getErrorMetadata();
		return emd == null ? null : emd.getError();
	}

	@Override
	public boolean hasErrors() {
		return getErrors() != null;
	}
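
	/*
	 * Worked example for the method below (editorial, derived from its implementation): for a
	 * dataset of shape [2, 3, 4], checkPermutatedAxes(shape) with no axes given fills in the
	 * reversed order and returns [2, 1, 0], whereas checkPermutatedAxes(shape, 0, 1, 2) returns
	 * null to signal the trivial (identity) permutation.
	 */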

	/**
	 * Check permutation axes
	 * @param shape to use
	 * @param axes if zero length then axes order reversed
	 * @return cleaned up copy of axes or null if trivial
	 */
	public static int[] checkPermutatedAxes(int[] shape, int... axes) {
		int rank = shape == null ? 0 : shape.length;

		if (axes == null || axes.length == 0) {
			axes = new int[rank];
			for (int i = 0; i < rank; i++) {
				axes[i] = rank - 1 - i;
			}
		} else {
			axes = axes.clone();
		}

		if (axes.length != rank) {
			logger.error("axis permutation has length {} that does not match dataset's rank {}", axes.length, rank);
			throw new IllegalArgumentException("axis permutation does not match shape of dataset");
		}

		// check all permutation values are within bounds
		for (int i = 0; i < rank; i++) {
			axes[i] = ShapeUtils.checkAxis(rank, axes[i]);
		}

		// check for a valid permutation (is this an unnecessary restriction?)
		int[] perm = axes.clone();
		Arrays.sort(perm);

		for (int i = 0; i < rank; i++) {
			if (perm[i] != i) {
				logger.error("axis permutation is not valid: it does not contain complete set of axes");
				throw new IllegalArgumentException("axis permutation does not contain complete set of axes");
			}
		}

		if (Arrays.equals(axes, perm)) {
			return null; // signal identity or trivial permutation
		}

		return axes;
	}
}